From d3f8eabb9b2571eca62b49f9160594735272862d Mon Sep 17 00:00:00 2001
From: Becca Petrin
Date: Thu, 16 Aug 2018 12:17:49 -0700
Subject: [PATCH] Add alicloud auth (#5123)

* add alicloud auth commands

* add dependencies
---
 command/commands.go | 3 +
 .../aliyun/alibaba-cloud-sdk-go/LICENSE | 201 +++
 .../sdk/auth/credential.go | 18 +
 .../auth/credentials/access_key_credential.go | 34 +
 .../sdk/auth/credentials/ecs_ram_role.go | 29 +
 .../credentials/rsa_key_pair_credential.go | 15 +
 .../sdk/auth/credentials/sts_credential.go | 15 +
 .../credentials/sts_role_arn_credential.go | 49 +
 .../sdk/auth/roa_signature_composer.go | 121 ++
 .../sdk/auth/rpc_signature_composer.go | 96 ++
 .../alibaba-cloud-sdk-go/sdk/auth/signer.go | 95 ++
 .../sdk/auth/signers/algorithms.go | 63 +
 .../sdk/auth/signers/credential_updater.go | 53 +
 .../sdk/auth/signers/session_credential.go | 7 +
 .../sdk/auth/signers/signer_access_key.go | 58 +
 .../sdk/auth/signers/signer_ecs_ram_role.go | 175 +++
 .../sdk/auth/signers/signer_key_pair.go | 148 ++
 .../sdk/auth/signers/signer_ram_role_arn.go | 174 +++
 .../sdk/auth/signers/signer_sts_token.go | 58 +
 .../sdk/auth/signers/signer_v2.go | 58 +
 .../aliyun/alibaba-cloud-sdk-go/sdk/client.go | 417 +++++
 .../aliyun/alibaba-cloud-sdk-go/sdk/config.go | 85 +
 .../sdk/endpoints/endpoints_config.go | 505 ++++++
 .../sdk/endpoints/local_global_resolver.go | 37 +
 .../sdk/endpoints/local_regional_resolver.go | 41 +
 .../sdk/endpoints/location_resolver.go | 139 ++
 .../sdk/endpoints/mapping_resolver.go | 39 +
 .../sdk/endpoints/resolver.go | 80 +
 .../sdk/endpoints/simple_host_resolver.go | 25 +
 .../sdk/errors/client_error.go | 92 ++
 .../alibaba-cloud-sdk-go/sdk/errors/error.go | 23 +
 .../sdk/errors/server_error.go | 122 ++
 .../signature_does_not_match_wrapper.go | 29 +
 .../sdk/requests/acs_reqeust.go | 308 ++++
 .../sdk/requests/common_request.go | 128 ++
 .../sdk/requests/roa_request.go | 144 ++
 .../sdk/requests/rpc_request.go | 81 +
 .../sdk/requests/types.go | 53 +
 .../sdk/responses/json_parser.go | 341 ++++
 .../sdk/responses/response.go | 137 ++
 .../alibaba-cloud-sdk-go/sdk/utils/utils.go | 117 ++
 .../services/sts/assume_role.go | 108 ++
 .../services/sts/client.go | 81 +
 .../sts/generate_session_access_key.go | 104 ++
 .../services/sts/get_caller_identity.go | 108 ++
 .../services/sts/struct_assumed_role_user.go | 22 +
 .../services/sts/struct_credentials.go | 24 +
 .../services/sts/struct_session_access_key.go | 23 +
 .../vault-plugin-auth-alicloud/Gopkg.lock | 409 +++++
 .../vault-plugin-auth-alicloud/Gopkg.toml | 46 +
 .../vault-plugin-auth-alicloud/LICENSE | 373 +++++
 .../vault-plugin-auth-alicloud/Makefile | 55 +
 .../vault-plugin-auth-alicloud/README.md | 127 ++
 .../vault-plugin-auth-alicloud/arn.go | 94 ++
 .../vault-plugin-auth-alicloud/backend.go | 57 +
 .../vault-plugin-auth-alicloud/cli.go | 75 +
 .../vault-plugin-auth-alicloud/path_login.go | 248 +++
 .../vault-plugin-auth-alicloud/path_role.go | 228 +++
 .../vault-plugin-auth-alicloud/role_entry.go | 31 +
 .../vault-plugin-auth-alicloud/tools/tools.go | 79 +
 vendor/github.com/json-iterator/go/Gopkg.lock | 21 +
 vendor/github.com/json-iterator/go/Gopkg.toml | 26 +
 vendor/github.com/json-iterator/go/LICENSE | 21 +
 vendor/github.com/json-iterator/go/README.md | 91 ++
 vendor/github.com/json-iterator/go/adapter.go | 148 ++
 vendor/github.com/json-iterator/go/any.go | 321 ++++
 .../github.com/json-iterator/go/any_array.go | 278 ++++
 .../github.com/json-iterator/go/any_bool.go | 137 ++
 .../github.com/json-iterator/go/any_float.go | 83 +
 .../github.com/json-iterator/go/any_int32.go | 74 +
 .../github.com/json-iterator/go/any_int64.go | 74 +
 .../json-iterator/go/any_invalid.go | 82 +
 vendor/github.com/json-iterator/go/any_nil.go | 69 +
 .../github.com/json-iterator/go/any_number.go | 123 ++
 .../github.com/json-iterator/go/any_object.go | 374 +++++
 vendor/github.com/json-iterator/go/any_str.go | 166 ++
 .../github.com/json-iterator/go/any_uint32.go | 74 +
 .../github.com/json-iterator/go/any_uint64.go | 74 +
 vendor/github.com/json-iterator/go/build.sh | 12 +
 vendor/github.com/json-iterator/go/config.go | 372 +++++
 .../go/fuzzy_mode_convert_table.md | 7 +
 vendor/github.com/json-iterator/go/iter.go | 322 ++++
 .../github.com/json-iterator/go/iter_array.go | 58 +
 .../github.com/json-iterator/go/iter_float.go | 347 +++++
 .../github.com/json-iterator/go/iter_int.go | 345 ++++
 .../json-iterator/go/iter_object.go | 252 +++
 .../github.com/json-iterator/go/iter_skip.go | 129 ++
 .../json-iterator/go/iter_skip_sloppy.go | 144 ++
 .../json-iterator/go/iter_skip_strict.go | 89 ++
 .../github.com/json-iterator/go/iter_str.go | 215 +++
 .../github.com/json-iterator/go/jsoniter.go | 18 +
 vendor/github.com/json-iterator/go/pool.go | 42 +
 vendor/github.com/json-iterator/go/reflect.go | 330 ++++
 .../json-iterator/go/reflect_array.go | 104 ++
 .../json-iterator/go/reflect_dynamic.go | 70 +
 .../json-iterator/go/reflect_extension.go | 471 ++++++
 .../json-iterator/go/reflect_json_number.go | 112 ++
 .../go/reflect_json_raw_message.go | 60 +
 .../json-iterator/go/reflect_map.go | 318 ++++
 .../json-iterator/go/reflect_marshaler.go | 218 +++
 .../json-iterator/go/reflect_native.go | 451 ++++++
 .../json-iterator/go/reflect_optional.go | 133 ++
 .../json-iterator/go/reflect_slice.go | 99 ++
 .../go/reflect_struct_decoder.go | 1048 +++++++++++++
 .../go/reflect_struct_encoder.go | 210 +++
 vendor/github.com/json-iterator/go/stream.go | 211 +++
 .../json-iterator/go/stream_float.go | 94 ++
 .../github.com/json-iterator/go/stream_int.go | 190 +++
 .../github.com/json-iterator/go/stream_str.go | 372 +++++
 vendor/github.com/json-iterator/go/test.sh | 12 +
 .../github.com/modern-go/concurrent/LICENSE | 201 +++
 .../github.com/modern-go/concurrent/README.md | 49 +
 .../modern-go/concurrent/executor.go | 14 +
 .../modern-go/concurrent/go_above_19.go | 15 +
 .../modern-go/concurrent/go_below_19.go | 33 +
 vendor/github.com/modern-go/concurrent/log.go | 13 +
 .../github.com/modern-go/concurrent/test.sh | 12 +
 .../concurrent/unbounded_executor.go | 119 ++
 .../github.com/modern-go/reflect2/Gopkg.lock | 15 +
 .../github.com/modern-go/reflect2/Gopkg.toml | 35 +
 vendor/github.com/modern-go/reflect2/LICENSE | 201 +++
 .../github.com/modern-go/reflect2/README.md | 71 +
 .../modern-go/reflect2/go_above_17.go | 8 +
 .../modern-go/reflect2/go_above_19.go | 14 +
 .../modern-go/reflect2/go_below_17.go | 9 +
 .../modern-go/reflect2/go_below_19.go | 14 +
 .../github.com/modern-go/reflect2/reflect2.go | 298 ++++
 .../modern-go/reflect2/reflect2_amd64.s | 0
 .../modern-go/reflect2/reflect2_kind.go | 30 +
 .../modern-go/reflect2/relfect2_386.s | 0
 .../modern-go/reflect2/relfect2_amd64p32.s | 0
 .../modern-go/reflect2/relfect2_arm.s | 0
 .../modern-go/reflect2/relfect2_arm64.s | 0
 .../modern-go/reflect2/relfect2_mips64x.s | 0
 .../modern-go/reflect2/relfect2_mipsx.s | 0
 .../modern-go/reflect2/relfect2_ppc64x.s | 0
 .../modern-go/reflect2/relfect2_s390x.s | 0
 .../modern-go/reflect2/safe_field.go | 58 +
 .../github.com/modern-go/reflect2/safe_map.go | 101 ++
 .../modern-go/reflect2/safe_slice.go | 92 ++
.../modern-go/reflect2/safe_struct.go | 29 + .../modern-go/reflect2/safe_type.go | 78 + vendor/github.com/modern-go/reflect2/test.sh | 12 + .../github.com/modern-go/reflect2/type_map.go | 103 ++ .../modern-go/reflect2/unsafe_array.go | 65 + .../modern-go/reflect2/unsafe_eface.go | 59 + .../modern-go/reflect2/unsafe_field.go | 74 + .../modern-go/reflect2/unsafe_iface.go | 64 + .../modern-go/reflect2/unsafe_link.go | 70 + .../modern-go/reflect2/unsafe_map.go | 138 ++ .../modern-go/reflect2/unsafe_ptr.go | 46 + .../modern-go/reflect2/unsafe_slice.go | 177 +++ .../modern-go/reflect2/unsafe_struct.go | 59 + .../modern-go/reflect2/unsafe_type.go | 85 + vendor/github.com/satori/go.uuid/README.md | 17 +- vendor/github.com/satori/go.uuid/generator.go | 186 +-- .../golang.org/x/net/lex/httplex/httplex.go | 351 +++++ .../grpclb/grpc_lb_v1/messages/messages.pb.go | 615 ++++++++ .../grpclb/grpc_lb_v1/messages/messages.proto | 155 ++ .../grpc/transport/bdp_estimator.go | 140 ++ .../grpc/transport/control.go | 334 ++++ .../google.golang.org/grpc/transport/go16.go | 51 + .../google.golang.org/grpc/transport/go17.go | 52 + .../grpc/transport/handler_server.go | 448 ++++++ .../grpc/transport/http2_client.go | 1384 +++++++++++++++++ .../grpc/transport/http2_server.go | 1212 +++++++++++++++ .../grpc/transport/http_util.go | 530 +++++++ .../google.golang.org/grpc/transport/log.go | 50 + .../grpc/transport/transport.go | 778 +++++++++ vendor/vendor.json | 132 +- 170 files changed, 24570 insertions(+), 122 deletions(-) create mode 100644 vendor/github.com/aliyun/alibaba-cloud-sdk-go/LICENSE create mode 100644 vendor/github.com/aliyun/alibaba-cloud-sdk-go/sdk/auth/credential.go create mode 100644 vendor/github.com/aliyun/alibaba-cloud-sdk-go/sdk/auth/credentials/access_key_credential.go create mode 100644 vendor/github.com/aliyun/alibaba-cloud-sdk-go/sdk/auth/credentials/ecs_ram_role.go create mode 100644 vendor/github.com/aliyun/alibaba-cloud-sdk-go/sdk/auth/credentials/rsa_key_pair_credential.go create mode 100644 vendor/github.com/aliyun/alibaba-cloud-sdk-go/sdk/auth/credentials/sts_credential.go create mode 100644 vendor/github.com/aliyun/alibaba-cloud-sdk-go/sdk/auth/credentials/sts_role_arn_credential.go create mode 100644 vendor/github.com/aliyun/alibaba-cloud-sdk-go/sdk/auth/roa_signature_composer.go create mode 100644 vendor/github.com/aliyun/alibaba-cloud-sdk-go/sdk/auth/rpc_signature_composer.go create mode 100644 vendor/github.com/aliyun/alibaba-cloud-sdk-go/sdk/auth/signer.go create mode 100644 vendor/github.com/aliyun/alibaba-cloud-sdk-go/sdk/auth/signers/algorithms.go create mode 100644 vendor/github.com/aliyun/alibaba-cloud-sdk-go/sdk/auth/signers/credential_updater.go create mode 100644 vendor/github.com/aliyun/alibaba-cloud-sdk-go/sdk/auth/signers/session_credential.go create mode 100644 vendor/github.com/aliyun/alibaba-cloud-sdk-go/sdk/auth/signers/signer_access_key.go create mode 100644 vendor/github.com/aliyun/alibaba-cloud-sdk-go/sdk/auth/signers/signer_ecs_ram_role.go create mode 100644 vendor/github.com/aliyun/alibaba-cloud-sdk-go/sdk/auth/signers/signer_key_pair.go create mode 100644 vendor/github.com/aliyun/alibaba-cloud-sdk-go/sdk/auth/signers/signer_ram_role_arn.go create mode 100644 vendor/github.com/aliyun/alibaba-cloud-sdk-go/sdk/auth/signers/signer_sts_token.go create mode 100644 vendor/github.com/aliyun/alibaba-cloud-sdk-go/sdk/auth/signers/signer_v2.go create mode 100644 vendor/github.com/aliyun/alibaba-cloud-sdk-go/sdk/client.go create mode 100644 
vendor/github.com/aliyun/alibaba-cloud-sdk-go/sdk/config.go create mode 100644 vendor/github.com/aliyun/alibaba-cloud-sdk-go/sdk/endpoints/endpoints_config.go create mode 100644 vendor/github.com/aliyun/alibaba-cloud-sdk-go/sdk/endpoints/local_global_resolver.go create mode 100644 vendor/github.com/aliyun/alibaba-cloud-sdk-go/sdk/endpoints/local_regional_resolver.go create mode 100644 vendor/github.com/aliyun/alibaba-cloud-sdk-go/sdk/endpoints/location_resolver.go create mode 100644 vendor/github.com/aliyun/alibaba-cloud-sdk-go/sdk/endpoints/mapping_resolver.go create mode 100644 vendor/github.com/aliyun/alibaba-cloud-sdk-go/sdk/endpoints/resolver.go create mode 100644 vendor/github.com/aliyun/alibaba-cloud-sdk-go/sdk/endpoints/simple_host_resolver.go create mode 100644 vendor/github.com/aliyun/alibaba-cloud-sdk-go/sdk/errors/client_error.go create mode 100644 vendor/github.com/aliyun/alibaba-cloud-sdk-go/sdk/errors/error.go create mode 100644 vendor/github.com/aliyun/alibaba-cloud-sdk-go/sdk/errors/server_error.go create mode 100644 vendor/github.com/aliyun/alibaba-cloud-sdk-go/sdk/errors/signature_does_not_match_wrapper.go create mode 100644 vendor/github.com/aliyun/alibaba-cloud-sdk-go/sdk/requests/acs_reqeust.go create mode 100644 vendor/github.com/aliyun/alibaba-cloud-sdk-go/sdk/requests/common_request.go create mode 100644 vendor/github.com/aliyun/alibaba-cloud-sdk-go/sdk/requests/roa_request.go create mode 100644 vendor/github.com/aliyun/alibaba-cloud-sdk-go/sdk/requests/rpc_request.go create mode 100644 vendor/github.com/aliyun/alibaba-cloud-sdk-go/sdk/requests/types.go create mode 100644 vendor/github.com/aliyun/alibaba-cloud-sdk-go/sdk/responses/json_parser.go create mode 100644 vendor/github.com/aliyun/alibaba-cloud-sdk-go/sdk/responses/response.go create mode 100644 vendor/github.com/aliyun/alibaba-cloud-sdk-go/sdk/utils/utils.go create mode 100644 vendor/github.com/aliyun/alibaba-cloud-sdk-go/services/sts/assume_role.go create mode 100644 vendor/github.com/aliyun/alibaba-cloud-sdk-go/services/sts/client.go create mode 100644 vendor/github.com/aliyun/alibaba-cloud-sdk-go/services/sts/generate_session_access_key.go create mode 100644 vendor/github.com/aliyun/alibaba-cloud-sdk-go/services/sts/get_caller_identity.go create mode 100644 vendor/github.com/aliyun/alibaba-cloud-sdk-go/services/sts/struct_assumed_role_user.go create mode 100644 vendor/github.com/aliyun/alibaba-cloud-sdk-go/services/sts/struct_credentials.go create mode 100644 vendor/github.com/aliyun/alibaba-cloud-sdk-go/services/sts/struct_session_access_key.go create mode 100644 vendor/github.com/hashicorp/vault-plugin-auth-alicloud/Gopkg.lock create mode 100644 vendor/github.com/hashicorp/vault-plugin-auth-alicloud/Gopkg.toml create mode 100644 vendor/github.com/hashicorp/vault-plugin-auth-alicloud/LICENSE create mode 100644 vendor/github.com/hashicorp/vault-plugin-auth-alicloud/Makefile create mode 100644 vendor/github.com/hashicorp/vault-plugin-auth-alicloud/README.md create mode 100644 vendor/github.com/hashicorp/vault-plugin-auth-alicloud/arn.go create mode 100644 vendor/github.com/hashicorp/vault-plugin-auth-alicloud/backend.go create mode 100644 vendor/github.com/hashicorp/vault-plugin-auth-alicloud/cli.go create mode 100644 vendor/github.com/hashicorp/vault-plugin-auth-alicloud/path_login.go create mode 100644 vendor/github.com/hashicorp/vault-plugin-auth-alicloud/path_role.go create mode 100644 vendor/github.com/hashicorp/vault-plugin-auth-alicloud/role_entry.go create mode 100644 
vendor/github.com/hashicorp/vault-plugin-auth-alicloud/tools/tools.go create mode 100644 vendor/github.com/json-iterator/go/Gopkg.lock create mode 100644 vendor/github.com/json-iterator/go/Gopkg.toml create mode 100644 vendor/github.com/json-iterator/go/LICENSE create mode 100644 vendor/github.com/json-iterator/go/README.md create mode 100644 vendor/github.com/json-iterator/go/adapter.go create mode 100644 vendor/github.com/json-iterator/go/any.go create mode 100644 vendor/github.com/json-iterator/go/any_array.go create mode 100644 vendor/github.com/json-iterator/go/any_bool.go create mode 100644 vendor/github.com/json-iterator/go/any_float.go create mode 100644 vendor/github.com/json-iterator/go/any_int32.go create mode 100644 vendor/github.com/json-iterator/go/any_int64.go create mode 100644 vendor/github.com/json-iterator/go/any_invalid.go create mode 100644 vendor/github.com/json-iterator/go/any_nil.go create mode 100644 vendor/github.com/json-iterator/go/any_number.go create mode 100644 vendor/github.com/json-iterator/go/any_object.go create mode 100644 vendor/github.com/json-iterator/go/any_str.go create mode 100644 vendor/github.com/json-iterator/go/any_uint32.go create mode 100644 vendor/github.com/json-iterator/go/any_uint64.go create mode 100755 vendor/github.com/json-iterator/go/build.sh create mode 100644 vendor/github.com/json-iterator/go/config.go create mode 100644 vendor/github.com/json-iterator/go/fuzzy_mode_convert_table.md create mode 100644 vendor/github.com/json-iterator/go/iter.go create mode 100644 vendor/github.com/json-iterator/go/iter_array.go create mode 100644 vendor/github.com/json-iterator/go/iter_float.go create mode 100644 vendor/github.com/json-iterator/go/iter_int.go create mode 100644 vendor/github.com/json-iterator/go/iter_object.go create mode 100644 vendor/github.com/json-iterator/go/iter_skip.go create mode 100644 vendor/github.com/json-iterator/go/iter_skip_sloppy.go create mode 100644 vendor/github.com/json-iterator/go/iter_skip_strict.go create mode 100644 vendor/github.com/json-iterator/go/iter_str.go create mode 100644 vendor/github.com/json-iterator/go/jsoniter.go create mode 100644 vendor/github.com/json-iterator/go/pool.go create mode 100644 vendor/github.com/json-iterator/go/reflect.go create mode 100644 vendor/github.com/json-iterator/go/reflect_array.go create mode 100644 vendor/github.com/json-iterator/go/reflect_dynamic.go create mode 100644 vendor/github.com/json-iterator/go/reflect_extension.go create mode 100644 vendor/github.com/json-iterator/go/reflect_json_number.go create mode 100644 vendor/github.com/json-iterator/go/reflect_json_raw_message.go create mode 100644 vendor/github.com/json-iterator/go/reflect_map.go create mode 100644 vendor/github.com/json-iterator/go/reflect_marshaler.go create mode 100644 vendor/github.com/json-iterator/go/reflect_native.go create mode 100644 vendor/github.com/json-iterator/go/reflect_optional.go create mode 100644 vendor/github.com/json-iterator/go/reflect_slice.go create mode 100644 vendor/github.com/json-iterator/go/reflect_struct_decoder.go create mode 100644 vendor/github.com/json-iterator/go/reflect_struct_encoder.go create mode 100644 vendor/github.com/json-iterator/go/stream.go create mode 100644 vendor/github.com/json-iterator/go/stream_float.go create mode 100644 vendor/github.com/json-iterator/go/stream_int.go create mode 100644 vendor/github.com/json-iterator/go/stream_str.go create mode 100755 vendor/github.com/json-iterator/go/test.sh create mode 100644 
vendor/github.com/modern-go/concurrent/LICENSE create mode 100644 vendor/github.com/modern-go/concurrent/README.md create mode 100644 vendor/github.com/modern-go/concurrent/executor.go create mode 100644 vendor/github.com/modern-go/concurrent/go_above_19.go create mode 100644 vendor/github.com/modern-go/concurrent/go_below_19.go create mode 100644 vendor/github.com/modern-go/concurrent/log.go create mode 100755 vendor/github.com/modern-go/concurrent/test.sh create mode 100644 vendor/github.com/modern-go/concurrent/unbounded_executor.go create mode 100644 vendor/github.com/modern-go/reflect2/Gopkg.lock create mode 100644 vendor/github.com/modern-go/reflect2/Gopkg.toml create mode 100644 vendor/github.com/modern-go/reflect2/LICENSE create mode 100644 vendor/github.com/modern-go/reflect2/README.md create mode 100644 vendor/github.com/modern-go/reflect2/go_above_17.go create mode 100644 vendor/github.com/modern-go/reflect2/go_above_19.go create mode 100644 vendor/github.com/modern-go/reflect2/go_below_17.go create mode 100644 vendor/github.com/modern-go/reflect2/go_below_19.go create mode 100644 vendor/github.com/modern-go/reflect2/reflect2.go create mode 100644 vendor/github.com/modern-go/reflect2/reflect2_amd64.s create mode 100644 vendor/github.com/modern-go/reflect2/reflect2_kind.go create mode 100644 vendor/github.com/modern-go/reflect2/relfect2_386.s create mode 100644 vendor/github.com/modern-go/reflect2/relfect2_amd64p32.s create mode 100644 vendor/github.com/modern-go/reflect2/relfect2_arm.s create mode 100644 vendor/github.com/modern-go/reflect2/relfect2_arm64.s create mode 100644 vendor/github.com/modern-go/reflect2/relfect2_mips64x.s create mode 100644 vendor/github.com/modern-go/reflect2/relfect2_mipsx.s create mode 100644 vendor/github.com/modern-go/reflect2/relfect2_ppc64x.s create mode 100644 vendor/github.com/modern-go/reflect2/relfect2_s390x.s create mode 100644 vendor/github.com/modern-go/reflect2/safe_field.go create mode 100644 vendor/github.com/modern-go/reflect2/safe_map.go create mode 100644 vendor/github.com/modern-go/reflect2/safe_slice.go create mode 100644 vendor/github.com/modern-go/reflect2/safe_struct.go create mode 100644 vendor/github.com/modern-go/reflect2/safe_type.go create mode 100755 vendor/github.com/modern-go/reflect2/test.sh create mode 100644 vendor/github.com/modern-go/reflect2/type_map.go create mode 100644 vendor/github.com/modern-go/reflect2/unsafe_array.go create mode 100644 vendor/github.com/modern-go/reflect2/unsafe_eface.go create mode 100644 vendor/github.com/modern-go/reflect2/unsafe_field.go create mode 100644 vendor/github.com/modern-go/reflect2/unsafe_iface.go create mode 100644 vendor/github.com/modern-go/reflect2/unsafe_link.go create mode 100644 vendor/github.com/modern-go/reflect2/unsafe_map.go create mode 100644 vendor/github.com/modern-go/reflect2/unsafe_ptr.go create mode 100644 vendor/github.com/modern-go/reflect2/unsafe_slice.go create mode 100644 vendor/github.com/modern-go/reflect2/unsafe_struct.go create mode 100644 vendor/github.com/modern-go/reflect2/unsafe_type.go create mode 100644 vendor/golang.org/x/net/lex/httplex/httplex.go create mode 100644 vendor/google.golang.org/grpc/grpclb/grpc_lb_v1/messages/messages.pb.go create mode 100644 vendor/google.golang.org/grpc/grpclb/grpc_lb_v1/messages/messages.proto create mode 100644 vendor/google.golang.org/grpc/transport/bdp_estimator.go create mode 100644 vendor/google.golang.org/grpc/transport/control.go create mode 100644 vendor/google.golang.org/grpc/transport/go16.go create 
mode 100644 vendor/google.golang.org/grpc/transport/go17.go create mode 100644 vendor/google.golang.org/grpc/transport/handler_server.go create mode 100644 vendor/google.golang.org/grpc/transport/http2_client.go create mode 100644 vendor/google.golang.org/grpc/transport/http2_server.go create mode 100644 vendor/google.golang.org/grpc/transport/http_util.go create mode 100644 vendor/google.golang.org/grpc/transport/log.go create mode 100644 vendor/google.golang.org/grpc/transport/transport.go diff --git a/command/commands.go b/command/commands.go index e4e65e3272..8827c51a0a 100644 --- a/command/commands.go +++ b/command/commands.go @@ -35,6 +35,7 @@ import ( auditSocket "github.com/hashicorp/vault/builtin/audit/socket" auditSyslog "github.com/hashicorp/vault/builtin/audit/syslog" + credAliCloud "github.com/hashicorp/vault-plugin-auth-alicloud" credAzure "github.com/hashicorp/vault-plugin-auth-azure" credCentrify "github.com/hashicorp/vault-plugin-auth-centrify" credGcp "github.com/hashicorp/vault-plugin-auth-gcp/plugin" @@ -99,6 +100,7 @@ var ( } credentialBackends = map[string]logical.Factory{ + "alicloud": credAliCloud.Factory, "app-id": credAppId.Factory, "approle": credAppRole.Factory, "aws": credAws.Factory, @@ -206,6 +208,7 @@ var DeprecatedCommands map[string]cli.CommandFactory func initCommands(ui, serverCmdUi cli.Ui, runOpts *RunOptions) { loginHandlers := map[string]LoginHandler{ + "alicloud": &credAliCloud.CLIHandler{}, "aws": &credAws.CLIHandler{}, "centrify": &credCentrify.CLIHandler{}, "cert": &credCert.CLIHandler{}, diff --git a/vendor/github.com/aliyun/alibaba-cloud-sdk-go/LICENSE b/vendor/github.com/aliyun/alibaba-cloud-sdk-go/LICENSE new file mode 100644 index 0000000000..261eeb9e9f --- /dev/null +++ b/vendor/github.com/aliyun/alibaba-cloud-sdk-go/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). 
+ + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/vendor/github.com/aliyun/alibaba-cloud-sdk-go/sdk/auth/credential.go b/vendor/github.com/aliyun/alibaba-cloud-sdk-go/sdk/auth/credential.go new file mode 100644 index 0000000000..7f20b7a40c --- /dev/null +++ b/vendor/github.com/aliyun/alibaba-cloud-sdk-go/sdk/auth/credential.go @@ -0,0 +1,18 @@ +/* + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package auth + +type Credential interface { +} diff --git a/vendor/github.com/aliyun/alibaba-cloud-sdk-go/sdk/auth/credentials/access_key_credential.go b/vendor/github.com/aliyun/alibaba-cloud-sdk-go/sdk/auth/credentials/access_key_credential.go new file mode 100644 index 0000000000..68f8226330 --- /dev/null +++ b/vendor/github.com/aliyun/alibaba-cloud-sdk-go/sdk/auth/credentials/access_key_credential.go @@ -0,0 +1,34 @@ +package credentials + +// Deprecated: Use AccessKeyCredential in this package instead. +type BaseCredential struct { + AccessKeyId string + AccessKeySecret string +} + +type AccessKeyCredential struct { + AccessKeyId string + AccessKeySecret string +} + +// Deprecated: Use NewAccessKeyCredential in this package instead. +func NewBaseCredential(accessKeyId, accessKeySecret string) *BaseCredential { + return &BaseCredential{ + AccessKeyId: accessKeyId, + AccessKeySecret: accessKeySecret, + } +} + +func (baseCred *BaseCredential) ToAccessKeyCredential() *AccessKeyCredential { + return &AccessKeyCredential{ + AccessKeyId: baseCred.AccessKeyId, + AccessKeySecret: baseCred.AccessKeySecret, + } +} + +func NewAccessKeyCredential(accessKeyId, accessKeySecret string) *AccessKeyCredential { + return &AccessKeyCredential{ + AccessKeyId: accessKeyId, + AccessKeySecret: accessKeySecret, + } +} diff --git a/vendor/github.com/aliyun/alibaba-cloud-sdk-go/sdk/auth/credentials/ecs_ram_role.go b/vendor/github.com/aliyun/alibaba-cloud-sdk-go/sdk/auth/credentials/ecs_ram_role.go new file mode 100644 index 0000000000..1e1f73ae4a --- /dev/null +++ b/vendor/github.com/aliyun/alibaba-cloud-sdk-go/sdk/auth/credentials/ecs_ram_role.go @@ -0,0 +1,29 @@ +package credentials + +// Deprecated: Use EcsRamRoleCredential in this package instead. +type StsRoleNameOnEcsCredential struct { + RoleName string +} + +// Deprecated: Use NewEcsRamRoleCredential in this package instead. 
+func NewStsRoleNameOnEcsCredential(roleName string) *StsRoleNameOnEcsCredential { + return &StsRoleNameOnEcsCredential{ + RoleName: roleName, + } +} + +func (oldCred *StsRoleNameOnEcsCredential) ToEcsRamRoleCredential() *EcsRamRoleCredential { + return &EcsRamRoleCredential{ + RoleName: oldCred.RoleName, + } +} + +type EcsRamRoleCredential struct { + RoleName string +} + +func NewEcsRamRoleCredential(roleName string) *EcsRamRoleCredential { + return &EcsRamRoleCredential{ + RoleName: roleName, + } +} diff --git a/vendor/github.com/aliyun/alibaba-cloud-sdk-go/sdk/auth/credentials/rsa_key_pair_credential.go b/vendor/github.com/aliyun/alibaba-cloud-sdk-go/sdk/auth/credentials/rsa_key_pair_credential.go new file mode 100644 index 0000000000..00d688eb8d --- /dev/null +++ b/vendor/github.com/aliyun/alibaba-cloud-sdk-go/sdk/auth/credentials/rsa_key_pair_credential.go @@ -0,0 +1,15 @@ +package credentials + +type RsaKeyPairCredential struct { + PrivateKey string + PublicKeyId string + SessionExpiration int +} + +func NewRsaKeyPairCredential(privateKey, publicKeyId string, sessionExpiration int) *RsaKeyPairCredential { + return &RsaKeyPairCredential{ + PrivateKey: privateKey, + PublicKeyId: publicKeyId, + SessionExpiration: sessionExpiration, + } +} diff --git a/vendor/github.com/aliyun/alibaba-cloud-sdk-go/sdk/auth/credentials/sts_credential.go b/vendor/github.com/aliyun/alibaba-cloud-sdk-go/sdk/auth/credentials/sts_credential.go new file mode 100644 index 0000000000..554431ff0b --- /dev/null +++ b/vendor/github.com/aliyun/alibaba-cloud-sdk-go/sdk/auth/credentials/sts_credential.go @@ -0,0 +1,15 @@ +package credentials + +type StsTokenCredential struct { + AccessKeyId string + AccessKeySecret string + AccessKeyStsToken string +} + +func NewStsTokenCredential(accessKeyId, accessKeySecret, accessKeyStsToken string) *StsTokenCredential { + return &StsTokenCredential{ + AccessKeyId: accessKeyId, + AccessKeySecret: accessKeySecret, + AccessKeyStsToken: accessKeyStsToken, + } +} diff --git a/vendor/github.com/aliyun/alibaba-cloud-sdk-go/sdk/auth/credentials/sts_role_arn_credential.go b/vendor/github.com/aliyun/alibaba-cloud-sdk-go/sdk/auth/credentials/sts_role_arn_credential.go new file mode 100644 index 0000000000..7a9db75d2c --- /dev/null +++ b/vendor/github.com/aliyun/alibaba-cloud-sdk-go/sdk/auth/credentials/sts_role_arn_credential.go @@ -0,0 +1,49 @@ +package credentials + +// Deprecated: Use RamRoleArnCredential in this package instead. +type StsRoleArnCredential struct { + AccessKeyId string + AccessKeySecret string + RoleArn string + RoleSessionName string + RoleSessionExpiration int +} + +type RamRoleArnCredential struct { + AccessKeyId string + AccessKeySecret string + RoleArn string + RoleSessionName string + RoleSessionExpiration int +} + +// Deprecated: Use RamRoleArnCredential in this package instead. 
+func NewStsRoleArnCredential(accessKeyId, accessKeySecret, roleArn, roleSessionName string, roleSessionExpiration int) *StsRoleArnCredential { + return &StsRoleArnCredential{ + AccessKeyId: accessKeyId, + AccessKeySecret: accessKeySecret, + RoleArn: roleArn, + RoleSessionName: roleSessionName, + RoleSessionExpiration: roleSessionExpiration, + } +} + +func (oldCred *StsRoleArnCredential) ToRamRoleArnCredential() *RamRoleArnCredential { + return &RamRoleArnCredential{ + AccessKeyId: oldCred.AccessKeyId, + AccessKeySecret: oldCred.AccessKeySecret, + RoleArn: oldCred.RoleArn, + RoleSessionName: oldCred.RoleSessionName, + RoleSessionExpiration: oldCred.RoleSessionExpiration, + } +} + +func NewRamRoleArnCredential(accessKeyId, accessKeySecret, roleArn, roleSessionName string, roleSessionExpiration int) *RamRoleArnCredential { + return &RamRoleArnCredential{ + AccessKeyId: accessKeyId, + AccessKeySecret: accessKeySecret, + RoleArn: roleArn, + RoleSessionName: roleSessionName, + RoleSessionExpiration: roleSessionExpiration, + } +} diff --git a/vendor/github.com/aliyun/alibaba-cloud-sdk-go/sdk/auth/roa_signature_composer.go b/vendor/github.com/aliyun/alibaba-cloud-sdk-go/sdk/auth/roa_signature_composer.go new file mode 100644 index 0000000000..902f1a6b48 --- /dev/null +++ b/vendor/github.com/aliyun/alibaba-cloud-sdk-go/sdk/auth/roa_signature_composer.go @@ -0,0 +1,121 @@ +/* + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package auth + +import ( + "bytes" + "github.com/aliyun/alibaba-cloud-sdk-go/sdk/requests" + "github.com/aliyun/alibaba-cloud-sdk-go/sdk/utils" + "sort" + "strings" +) + +func signRoaRequest(request requests.AcsRequest, signer Signer, regionId string) (err error) { + completeROASignParams(request, signer, regionId) + stringToSign := buildRoaStringToSign(request) + request.SetStringToSign(stringToSign) + signature := signer.Sign(stringToSign, "") + accessKeyId, err := signer.GetAccessKeyId() + if err != nil { + return nil + } + + request.GetHeaders()["Authorization"] = "acs " + accessKeyId + ":" + signature + + return +} + +func completeROASignParams(request requests.AcsRequest, signer Signer, regionId string) { + headerParams := request.GetHeaders() + + // complete query params + queryParams := request.GetQueryParams() + if _, ok := queryParams["RegionId"]; !ok { + queryParams["RegionId"] = regionId + } + if extraParam := signer.GetExtraParam(); extraParam != nil { + for key, value := range extraParam { + if key == "SecurityToken" { + headerParams["x-acs-security-token"] = value + continue + } + + queryParams[key] = value + } + } + + // complete header params + headerParams["Date"] = utils.GetTimeInFormatRFC2616() + headerParams["x-acs-signature-method"] = signer.GetName() + headerParams["x-acs-signature-version"] = signer.GetVersion() + if request.GetFormParams() != nil && len(request.GetFormParams()) > 0 { + formString := utils.GetUrlFormedMap(request.GetFormParams()) + request.SetContent([]byte(formString)) + headerParams["Content-Type"] = requests.Form + } + contentMD5 := utils.GetMD5Base64(request.GetContent()) + headerParams["Content-MD5"] = contentMD5 + if _, contains := headerParams["Content-Type"]; !contains { + headerParams["Content-Type"] = requests.Raw + } + switch format := request.GetAcceptFormat(); format { + case "JSON": + headerParams["Accept"] = requests.Json + case "XML": + headerParams["Accept"] = requests.Xml + default: + headerParams["Accept"] = requests.Raw + } +} + +func buildRoaStringToSign(request requests.AcsRequest) (stringToSign string) { + + headers := request.GetHeaders() + + stringToSignBuilder := bytes.Buffer{} + stringToSignBuilder.WriteString(request.GetMethod()) + stringToSignBuilder.WriteString(requests.HeaderSeparator) + + // append header keys for sign + appendIfContain(headers, &stringToSignBuilder, "Accept", requests.HeaderSeparator) + appendIfContain(headers, &stringToSignBuilder, "Content-MD5", requests.HeaderSeparator) + appendIfContain(headers, &stringToSignBuilder, "Content-Type", requests.HeaderSeparator) + appendIfContain(headers, &stringToSignBuilder, "Date", requests.HeaderSeparator) + + // sort and append headers witch starts with 'x-acs-' + var acsHeaders []string + for key := range headers { + if strings.HasPrefix(key, "x-acs-") { + acsHeaders = append(acsHeaders, key) + } + } + sort.Strings(acsHeaders) + for _, key := range acsHeaders { + stringToSignBuilder.WriteString(key + ":" + headers[key]) + stringToSignBuilder.WriteString(requests.HeaderSeparator) + } + + // append query params + stringToSignBuilder.WriteString(request.BuildQueries()) + stringToSign = stringToSignBuilder.String() + return +} + +func appendIfContain(sourceMap map[string]string, target *bytes.Buffer, key, separator string) { + if value, contain := sourceMap[key]; contain && len(value) > 0 { + target.WriteString(sourceMap[key]) + target.WriteString(separator) + } +} diff --git a/vendor/github.com/aliyun/alibaba-cloud-sdk-go/sdk/auth/rpc_signature_composer.go 
b/vendor/github.com/aliyun/alibaba-cloud-sdk-go/sdk/auth/rpc_signature_composer.go new file mode 100644 index 0000000000..0c6f6d111e --- /dev/null +++ b/vendor/github.com/aliyun/alibaba-cloud-sdk-go/sdk/auth/rpc_signature_composer.go @@ -0,0 +1,96 @@ +/* + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package auth + +import ( + "github.com/aliyun/alibaba-cloud-sdk-go/sdk/requests" + "github.com/aliyun/alibaba-cloud-sdk-go/sdk/utils" + "net/url" + "sort" + "strings" +) + +func signRpcRequest(request requests.AcsRequest, signer Signer, regionId string) (err error) { + err = completeRpcSignParams(request, signer, regionId) + if err != nil { + return + } + // remove while retry + if _, containsSign := request.GetQueryParams()["Signature"]; containsSign { + delete(request.GetQueryParams(), "Signature") + } + stringToSign := buildRpcStringToSign(request) + request.SetStringToSign(stringToSign) + signature := signer.Sign(stringToSign, "&") + request.GetQueryParams()["Signature"] = signature + + return +} + +func completeRpcSignParams(request requests.AcsRequest, signer Signer, regionId string) (err error) { + queryParams := request.GetQueryParams() + queryParams["Version"] = request.GetVersion() + queryParams["Action"] = request.GetActionName() + queryParams["Format"] = request.GetAcceptFormat() + queryParams["Timestamp"] = utils.GetTimeInFormatISO8601() + queryParams["SignatureMethod"] = signer.GetName() + queryParams["SignatureType"] = signer.GetType() + queryParams["SignatureVersion"] = signer.GetVersion() + queryParams["SignatureNonce"] = utils.GetUUIDV4() + queryParams["AccessKeyId"], err = signer.GetAccessKeyId() + + if err != nil { + return + } + + if _, contains := queryParams["RegionId"]; !contains { + queryParams["RegionId"] = regionId + } + if extraParam := signer.GetExtraParam(); extraParam != nil { + for key, value := range extraParam { + queryParams[key] = value + } + } + + request.GetHeaders()["Content-Type"] = requests.Form + formString := utils.GetUrlFormedMap(request.GetFormParams()) + request.SetContent([]byte(formString)) + + return +} + +func buildRpcStringToSign(request requests.AcsRequest) (stringToSign string) { + signParams := make(map[string]string) + for key, value := range request.GetQueryParams() { + signParams[key] = value + } + for key, value := range request.GetFormParams() { + signParams[key] = value + } + + // sort params by key + var paramKeySlice []string + for key := range signParams { + paramKeySlice = append(paramKeySlice, key) + } + sort.Strings(paramKeySlice) + stringToSign = utils.GetUrlFormedMap(signParams) + stringToSign = strings.Replace(stringToSign, "+", "%20", -1) + stringToSign = strings.Replace(stringToSign, "*", "%2A", -1) + stringToSign = strings.Replace(stringToSign, "%7E", "~", -1) + stringToSign = url.QueryEscape(stringToSign) + stringToSign = request.GetMethod() + "&%2F&" + stringToSign + return +} diff --git a/vendor/github.com/aliyun/alibaba-cloud-sdk-go/sdk/auth/signer.go 
b/vendor/github.com/aliyun/alibaba-cloud-sdk-go/sdk/auth/signer.go new file mode 100644 index 0000000000..ba946d0fd4 --- /dev/null +++ b/vendor/github.com/aliyun/alibaba-cloud-sdk-go/sdk/auth/signer.go @@ -0,0 +1,95 @@ +/* + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package auth + +import ( + "fmt" + "github.com/aliyun/alibaba-cloud-sdk-go/sdk/auth/credentials" + "github.com/aliyun/alibaba-cloud-sdk-go/sdk/auth/signers" + "github.com/aliyun/alibaba-cloud-sdk-go/sdk/errors" + "github.com/aliyun/alibaba-cloud-sdk-go/sdk/requests" + "github.com/aliyun/alibaba-cloud-sdk-go/sdk/responses" + "reflect" +) + +type Signer interface { + GetName() string + GetType() string + GetVersion() string + GetAccessKeyId() (string, error) + GetExtraParam() map[string]string + Sign(stringToSign, secretSuffix string) string + Shutdown() +} + +func NewSignerWithCredential(credential Credential, commonApi func(request *requests.CommonRequest, signer interface{}) (response *responses.CommonResponse, err error)) (signer Signer, err error) { + switch instance := credential.(type) { + case *credentials.AccessKeyCredential: + { + signer, err = signers.NewAccessKeySigner(instance) + } + case *credentials.StsTokenCredential: + { + signer, err = signers.NewStsTokenSigner(instance) + } + + case *credentials.RamRoleArnCredential: + { + signer, err = signers.NewRamRoleArnSigner(instance, commonApi) + } + case *credentials.RsaKeyPairCredential: + { + signer, err = signers.NewSignerKeyPair(instance, commonApi) + } + case *credentials.EcsRamRoleCredential: + { + signer, err = signers.NewEcsRamRoleSigner(instance, commonApi) + } + case *credentials.BaseCredential: // deprecated user interface + { + signer, err = signers.NewAccessKeySigner(instance.ToAccessKeyCredential()) + } + case *credentials.StsRoleArnCredential: // deprecated user interface + { + signer, err = signers.NewRamRoleArnSigner(instance.ToRamRoleArnCredential(), commonApi) + } + case *credentials.StsRoleNameOnEcsCredential: // deprecated user interface + { + signer, err = signers.NewEcsRamRoleSigner(instance.ToEcsRamRoleCredential(), commonApi) + } + default: + message := fmt.Sprintf(errors.UnsupportedCredentialErrorMessage, reflect.TypeOf(credential)) + err = errors.NewClientError(errors.UnsupportedCredentialErrorCode, message, nil) + } + return +} + +func Sign(request requests.AcsRequest, signer Signer, regionId string) (err error) { + switch request.GetStyle() { + case requests.ROA: + { + signRoaRequest(request, signer, regionId) + } + case requests.RPC: + { + err = signRpcRequest(request, signer, regionId) + } + default: + message := fmt.Sprintf(errors.UnknownRequestTypeErrorMessage, reflect.TypeOf(request)) + err = errors.NewClientError(errors.UnknownRequestTypeErrorCode, message, nil) + } + + return +} diff --git a/vendor/github.com/aliyun/alibaba-cloud-sdk-go/sdk/auth/signers/algorithms.go b/vendor/github.com/aliyun/alibaba-cloud-sdk-go/sdk/auth/signers/algorithms.go new file mode 100644 index 0000000000..975e985b93 --- /dev/null +++ 
b/vendor/github.com/aliyun/alibaba-cloud-sdk-go/sdk/auth/signers/algorithms.go @@ -0,0 +1,63 @@ +/* + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package signers + +import ( + "crypto" + "crypto/hmac" + "crypto/rand" + "crypto/rsa" + "crypto/sha1" + "crypto/x509" + "encoding/base64" + "fmt" + /*"encoding/pem" + "io/ioutil" + "os/user" + "crypto/sha256"*/) + +func ShaHmac1(source, secret string) string { + key := []byte(secret) + hmac := hmac.New(sha1.New, key) + hmac.Write([]byte(source)) + signedBytes := hmac.Sum(nil) + signedString := base64.StdEncoding.EncodeToString(signedBytes) + return signedString +} + +func Sha256WithRsa(source, secret string) string { + decodeString, err := base64.StdEncoding.DecodeString(secret) + if err != nil { + fmt.Println("DecodeString err", err) + } + private, err := x509.ParsePKCS8PrivateKey(decodeString) + if err != nil { + fmt.Println("ParsePKCS8PrivateKey err", err) + } + + h := crypto.Hash.New(crypto.SHA256) + h.Write([]byte(source)) + hashed := h.Sum(nil) + signature, err := rsa.SignPKCS1v15(rand.Reader, private.(*rsa.PrivateKey), + crypto.SHA256, hashed) + if err != nil { + fmt.Println("Error from signing:", err) + return "" + } + + signedString := base64.StdEncoding.EncodeToString(signature) + //fmt.Printf("Encoded: %v\n", signedString) + return signedString +} diff --git a/vendor/github.com/aliyun/alibaba-cloud-sdk-go/sdk/auth/signers/credential_updater.go b/vendor/github.com/aliyun/alibaba-cloud-sdk-go/sdk/auth/signers/credential_updater.go new file mode 100644 index 0000000000..bb73d2441e --- /dev/null +++ b/vendor/github.com/aliyun/alibaba-cloud-sdk-go/sdk/auth/signers/credential_updater.go @@ -0,0 +1,53 @@ +/* + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package signers + +import ( + "github.com/aliyun/alibaba-cloud-sdk-go/sdk/requests" + "github.com/aliyun/alibaba-cloud-sdk-go/sdk/responses" + "time" +) + +const defaultInAdvanceScale = 0.8 + +type credentialUpdater struct { + credentialExpiration int + lastUpdateTimestamp int64 + inAdvanceScale float64 + buildRequestMethod func() (*requests.CommonRequest, error) + responseCallBack func(response *responses.CommonResponse) error + refreshApi func(request *requests.CommonRequest) (response *responses.CommonResponse, err error) +} + +func (updater *credentialUpdater) needUpdateCredential() (result bool) { + if updater.inAdvanceScale == 0 { + updater.inAdvanceScale = defaultInAdvanceScale + } + return time.Now().Unix()-updater.lastUpdateTimestamp >= int64(float64(updater.credentialExpiration)*updater.inAdvanceScale) +} + +func (updater *credentialUpdater) updateCredential() (err error) { + request, err := updater.buildRequestMethod() + if err != nil { + return + } + response, err := updater.refreshApi(request) + if err != nil { + return + } + updater.lastUpdateTimestamp = time.Now().Unix() + err = updater.responseCallBack(response) + return +} diff --git a/vendor/github.com/aliyun/alibaba-cloud-sdk-go/sdk/auth/signers/session_credential.go b/vendor/github.com/aliyun/alibaba-cloud-sdk-go/sdk/auth/signers/session_credential.go new file mode 100644 index 0000000000..99c624c880 --- /dev/null +++ b/vendor/github.com/aliyun/alibaba-cloud-sdk-go/sdk/auth/signers/session_credential.go @@ -0,0 +1,7 @@ +package signers + +type SessionCredential struct { + AccessKeyId string + AccessKeySecret string + StsToken string +} diff --git a/vendor/github.com/aliyun/alibaba-cloud-sdk-go/sdk/auth/signers/signer_access_key.go b/vendor/github.com/aliyun/alibaba-cloud-sdk-go/sdk/auth/signers/signer_access_key.go new file mode 100644 index 0000000000..92466ea559 --- /dev/null +++ b/vendor/github.com/aliyun/alibaba-cloud-sdk-go/sdk/auth/signers/signer_access_key.go @@ -0,0 +1,58 @@ +/* + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package signers + +import ( + "github.com/aliyun/alibaba-cloud-sdk-go/sdk/auth/credentials" +) + +type AccessKeySigner struct { + credential *credentials.AccessKeyCredential +} + +func (signer *AccessKeySigner) GetExtraParam() map[string]string { + return nil +} + +func NewAccessKeySigner(credential *credentials.AccessKeyCredential) (*AccessKeySigner, error) { + return &AccessKeySigner{ + credential: credential, + }, nil +} + +func (*AccessKeySigner) GetName() string { + return "HMAC-SHA1" +} + +func (*AccessKeySigner) GetType() string { + return "" +} + +func (*AccessKeySigner) GetVersion() string { + return "1.0" +} + +func (signer *AccessKeySigner) GetAccessKeyId() (accessKeyId string, err error) { + return signer.credential.AccessKeyId, nil +} + +func (signer *AccessKeySigner) Sign(stringToSign, secretSuffix string) string { + secret := signer.credential.AccessKeySecret + secretSuffix + return ShaHmac1(stringToSign, secret) +} + +func (signer *AccessKeySigner) Shutdown() { + +} diff --git a/vendor/github.com/aliyun/alibaba-cloud-sdk-go/sdk/auth/signers/signer_ecs_ram_role.go b/vendor/github.com/aliyun/alibaba-cloud-sdk-go/sdk/auth/signers/signer_ecs_ram_role.go new file mode 100644 index 0000000000..6cacc140d7 --- /dev/null +++ b/vendor/github.com/aliyun/alibaba-cloud-sdk-go/sdk/auth/signers/signer_ecs_ram_role.go @@ -0,0 +1,175 @@ +/* + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package signers + +import ( + "encoding/json" + "fmt" + "github.com/aliyun/alibaba-cloud-sdk-go/sdk/auth/credentials" + "github.com/aliyun/alibaba-cloud-sdk-go/sdk/requests" + "github.com/aliyun/alibaba-cloud-sdk-go/sdk/responses" + "github.com/jmespath/go-jmespath" + "net/http" + "strings" + "time" +) + +type EcsRamRoleSigner struct { + *credentialUpdater + sessionCredential *SessionCredential + credential *credentials.EcsRamRoleCredential + commonApi func(request *requests.CommonRequest, signer interface{}) (response *responses.CommonResponse, err error) +} + +func NewEcsRamRoleSigner(credential *credentials.EcsRamRoleCredential, commonApi func(*requests.CommonRequest, interface{}) (response *responses.CommonResponse, err error)) (signer *EcsRamRoleSigner, err error) { + signer = &EcsRamRoleSigner{ + credential: credential, + commonApi: commonApi, + } + + signer.credentialUpdater = &credentialUpdater{ + credentialExpiration: defaultDurationSeconds / 60, + buildRequestMethod: signer.buildCommonRequest, + responseCallBack: signer.refreshCredential, + refreshApi: signer.refreshApi, + } + + return +} + +func (*EcsRamRoleSigner) GetName() string { + return "HMAC-SHA1" +} + +func (*EcsRamRoleSigner) GetType() string { + return "" +} + +func (*EcsRamRoleSigner) GetVersion() string { + return "1.0" +} + +func (signer *EcsRamRoleSigner) GetAccessKeyId() (accessKeyId string, err error) { + if signer.sessionCredential == nil || signer.needUpdateCredential() { + err = signer.updateCredential() + } + if err != nil && (signer.sessionCredential == nil || len(signer.sessionCredential.AccessKeyId) <= 0) { + return "", err + } + return signer.sessionCredential.AccessKeyId, nil +} + +func (signer *EcsRamRoleSigner) GetExtraParam() map[string]string { + if signer.sessionCredential == nil { + return make(map[string]string) + } + if len(signer.sessionCredential.StsToken) <= 0 { + return make(map[string]string) + } + return map[string]string{"SecurityToken": signer.sessionCredential.StsToken} +} + +func (signer *EcsRamRoleSigner) Sign(stringToSign, secretSuffix string) string { + secret := signer.sessionCredential.AccessKeyId + secretSuffix + return ShaHmac1(stringToSign, secret) +} + +func (signer *EcsRamRoleSigner) buildCommonRequest() (request *requests.CommonRequest, err error) { + request = requests.NewCommonRequest() + return +} + +func (signer *EcsRamRoleSigner) refreshApi(request *requests.CommonRequest) (response *responses.CommonResponse, err error) { + requestUrl := "http://100.100.100.200/latest/meta-data/ram/security-credentials/" + signer.credential.RoleName + httpRequest, err := http.NewRequest(requests.GET, requestUrl, strings.NewReader("")) + if err != nil { + fmt.Println("refresh Ecs sts token err", err) + return + } + httpClient := &http.Client{} + httpResponse, err := httpClient.Do(httpRequest) + if err != nil { + fmt.Println("refresh Ecs sts token err", err) + return + } + + response = responses.NewCommonResponse() + err = responses.Unmarshal(response, httpResponse, "") + + return +} + +func (signer *EcsRamRoleSigner) refreshCredential(response *responses.CommonResponse) (err error) { + if response.GetHttpStatus() != http.StatusOK { + fmt.Println("refresh Ecs sts token err, httpStatus: " + string(response.GetHttpStatus()) + ", message = " + response.GetHttpContentString()) + return + } + var data interface{} + err = json.Unmarshal(response.GetHttpContentBytes(), &data) + if err != nil { + fmt.Println("refresh Ecs sts token err, json.Unmarshal fail", err) + return + } + code, err := 
jmespath.Search("Code", data) + if err != nil { + fmt.Println("refresh Ecs sts token err, fail to get Code", err) + return + } + if code.(string) != "Success" { + fmt.Println("refresh Ecs sts token err, Code is not Success", err) + return + } + accessKeyId, err := jmespath.Search("AccessKeyId", data) + if err != nil { + fmt.Println("refresh Ecs sts token err, fail to get AccessKeyId", err) + return + } + accessKeySecret, err := jmespath.Search("AccessKeySecret", data) + if err != nil { + fmt.Println("refresh Ecs sts token err, fail to get AccessKeySecret", err) + return + } + securityToken, err := jmespath.Search("SecurityToken", data) + if err != nil { + fmt.Println("refresh Ecs sts token err, fail to get SecurityToken", err) + return + } + expiration, err := jmespath.Search("Expiration", data) + if err != nil { + fmt.Println("refresh Ecs sts token err, fail to get Expiration", err) + return + } + if accessKeyId == nil || accessKeySecret == nil || securityToken == nil { + return + } + + expirationTime, err := time.Parse("2006-01-02T15:04:05Z", expiration.(string)) + signer.credentialExpiration = int(expirationTime.Unix() - time.Now().Unix()) + signer.sessionCredential = &SessionCredential{ + AccessKeyId: accessKeyId.(string), + AccessKeySecret: accessKeySecret.(string), + StsToken: securityToken.(string), + } + + return +} + +func (signer *EcsRamRoleSigner) GetSessionCredential() *SessionCredential { + return signer.sessionCredential +} + +func (signer *EcsRamRoleSigner) Shutdown() { + +} diff --git a/vendor/github.com/aliyun/alibaba-cloud-sdk-go/sdk/auth/signers/signer_key_pair.go b/vendor/github.com/aliyun/alibaba-cloud-sdk-go/sdk/auth/signers/signer_key_pair.go new file mode 100644 index 0000000000..c5fe39456c --- /dev/null +++ b/vendor/github.com/aliyun/alibaba-cloud-sdk-go/sdk/auth/signers/signer_key_pair.go @@ -0,0 +1,148 @@ +/* + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package signers + +import ( + "encoding/json" + "fmt" + "github.com/aliyun/alibaba-cloud-sdk-go/sdk/auth/credentials" + "github.com/aliyun/alibaba-cloud-sdk-go/sdk/errors" + "github.com/aliyun/alibaba-cloud-sdk-go/sdk/requests" + "github.com/aliyun/alibaba-cloud-sdk-go/sdk/responses" + "github.com/jmespath/go-jmespath" + "net/http" + "strconv" +) + +type SignerKeyPair struct { + *credentialUpdater + sessionCredential *SessionCredential + credential *credentials.RsaKeyPairCredential + commonApi func(request *requests.CommonRequest, signer interface{}) (response *responses.CommonResponse, err error) +} + +func NewSignerKeyPair(credential *credentials.RsaKeyPairCredential, commonApi func(*requests.CommonRequest, interface{}) (response *responses.CommonResponse, err error)) (signer *SignerKeyPair, err error) { + signer = &SignerKeyPair{ + credential: credential, + commonApi: commonApi, + } + + signer.credentialUpdater = &credentialUpdater{ + credentialExpiration: credential.SessionExpiration, + buildRequestMethod: signer.buildCommonRequest, + responseCallBack: signer.refreshCredential, + refreshApi: signer.refreshApi, + } + + if credential.SessionExpiration > 0 { + if credential.SessionExpiration >= 900 && credential.SessionExpiration <= 3600 { + signer.credentialExpiration = credential.SessionExpiration + } else { + err = errors.NewClientError(errors.InvalidParamErrorCode, "Key Pair session duration should be in the range of 15min - 1Hr", nil) + } + } else { + signer.credentialExpiration = defaultDurationSeconds + } + return +} + +func (*SignerKeyPair) GetName() string { + return "HMAC-SHA1" +} + +func (*SignerKeyPair) GetType() string { + return "" +} + +func (*SignerKeyPair) GetVersion() string { + return "1.0" +} + +func (signer *SignerKeyPair) GetAccessKeyId() (accessKeyId string, err error) { + if signer.sessionCredential == nil || signer.needUpdateCredential() { + err = signer.updateCredential() + } + if err != nil && (signer.sessionCredential == nil || len(signer.sessionCredential.AccessKeyId) <= 0) { + return "", err + } + return signer.sessionCredential.AccessKeyId, err +} + +func (signer *SignerKeyPair) GetExtraParam() map[string]string { + if signer.sessionCredential == nil || signer.needUpdateCredential() { + signer.updateCredential() + } + if signer.sessionCredential == nil || len(signer.sessionCredential.AccessKeyId) <= 0 { + return make(map[string]string) + } + return make(map[string]string) +} + +func (signer *SignerKeyPair) Sign(stringToSign, secretSuffix string) string { + secret := signer.sessionCredential.AccessKeyId + secretSuffix + return ShaHmac1(stringToSign, secret) +} + +func (signer *SignerKeyPair) buildCommonRequest() (request *requests.CommonRequest, err error) { + request = requests.NewCommonRequest() + request.Product = "Sts" + request.Version = "2015-04-01" + request.ApiName = "GenerateSessionAccessKey" + request.Scheme = requests.HTTPS + request.QueryParams["PublicKeyId"] = signer.credential.PublicKeyId + request.QueryParams["DurationSeconds"] = strconv.Itoa(signer.credentialExpiration) + return +} + +func (signerKeyPair *SignerKeyPair) refreshApi(request *requests.CommonRequest) (response *responses.CommonResponse, err error) { + signerV2, err := NewSignerV2(signerKeyPair.credential) + return signerKeyPair.commonApi(request, signerV2) +} + +func (signer *SignerKeyPair) refreshCredential(response *responses.CommonResponse) (err error) { + if response.GetHttpStatus() != http.StatusOK { + message := "refresh session AccessKey failed" + err = 
errors.NewServerError(response.GetHttpStatus(), response.GetHttpContentString(), message) + return + } + var data interface{} + err = json.Unmarshal(response.GetHttpContentBytes(), &data) + if err != nil { + fmt.Println("refresh KeyPair err, json.Unmarshal fail", err) + return + } + accessKeyId, err := jmespath.Search("SessionAccessKey.SessionAccessKeyId", data) + if err != nil { + fmt.Println("refresh KeyPair err, fail to get SessionAccessKeyId", err) + return + } + accessKeySecret, err := jmespath.Search("SessionAccessKey.SessionAccessKeySecret", data) + if err != nil { + fmt.Println("refresh KeyPair err, fail to get SessionAccessKeySecret", err) + return + } + if accessKeyId == nil || accessKeySecret == nil { + return + } + signer.sessionCredential = &SessionCredential{ + AccessKeyId: accessKeyId.(string), + AccessKeySecret: accessKeySecret.(string), + } + return +} + +func (signer *SignerKeyPair) Shutdown() { + +} diff --git a/vendor/github.com/aliyun/alibaba-cloud-sdk-go/sdk/auth/signers/signer_ram_role_arn.go b/vendor/github.com/aliyun/alibaba-cloud-sdk-go/sdk/auth/signers/signer_ram_role_arn.go new file mode 100644 index 0000000000..bfcaa9573c --- /dev/null +++ b/vendor/github.com/aliyun/alibaba-cloud-sdk-go/sdk/auth/signers/signer_ram_role_arn.go @@ -0,0 +1,174 @@ +/* + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package signers + +import ( + "encoding/json" + "fmt" + "github.com/aliyun/alibaba-cloud-sdk-go/sdk/auth/credentials" + "github.com/aliyun/alibaba-cloud-sdk-go/sdk/errors" + "github.com/aliyun/alibaba-cloud-sdk-go/sdk/requests" + "github.com/aliyun/alibaba-cloud-sdk-go/sdk/responses" + "github.com/jmespath/go-jmespath" + "net/http" + "strconv" + "time" +) + +const ( + defaultDurationSeconds = 3600 +) + +type RamRoleArnSigner struct { + *credentialUpdater + roleSessionName string + sessionCredential *SessionCredential + credential *credentials.RamRoleArnCredential + commonApi func(request *requests.CommonRequest, signer interface{}) (response *responses.CommonResponse, err error) +} + +func NewRamRoleArnSigner(credential *credentials.RamRoleArnCredential, commonApi func(request *requests.CommonRequest, signer interface{}) (response *responses.CommonResponse, err error)) (signer *RamRoleArnSigner, err error) { + signer = &RamRoleArnSigner{ + credential: credential, + commonApi: commonApi, + } + + signer.credentialUpdater = &credentialUpdater{ + credentialExpiration: credential.RoleSessionExpiration, + buildRequestMethod: signer.buildCommonRequest, + responseCallBack: signer.refreshCredential, + refreshApi: signer.refreshApi, + } + + if len(credential.RoleSessionName) > 0 { + signer.roleSessionName = credential.RoleSessionName + } else { + signer.roleSessionName = "aliyun-go-sdk-" + strconv.FormatInt(time.Now().UnixNano()/1000, 10) + } + if credential.RoleSessionExpiration > 0 { + if credential.RoleSessionExpiration >= 900 && credential.RoleSessionExpiration <= 3600 { + signer.credentialExpiration = credential.RoleSessionExpiration + } else { + err = errors.NewClientError(errors.InvalidParamErrorCode, "Assume Role session duration should be in the range of 15min - 1Hr", nil) + } + } else { + signer.credentialExpiration = defaultDurationSeconds + } + return +} + +func (*RamRoleArnSigner) GetName() string { + return "HMAC-SHA1" +} + +func (*RamRoleArnSigner) GetType() string { + return "" +} + +func (*RamRoleArnSigner) GetVersion() string { + return "1.0" +} + +func (signer *RamRoleArnSigner) GetAccessKeyId() (accessKeyId string, err error) { + if signer.sessionCredential == nil || signer.needUpdateCredential() { + err = signer.updateCredential() + } + if err != nil && (signer.sessionCredential == nil || len(signer.sessionCredential.AccessKeyId) <= 0) { + return "", err + } + return signer.sessionCredential.AccessKeyId, nil +} + +func (signer *RamRoleArnSigner) GetExtraParam() map[string]string { + if signer.sessionCredential == nil || signer.needUpdateCredential() { + signer.updateCredential() + } + if signer.sessionCredential == nil || len(signer.sessionCredential.StsToken) <= 0 { + return make(map[string]string) + } + return map[string]string{"SecurityToken": signer.sessionCredential.StsToken} +} + +func (signer *RamRoleArnSigner) Sign(stringToSign, secretSuffix string) string { + secret := signer.sessionCredential.AccessKeySecret + secretSuffix + return ShaHmac1(stringToSign, secret) +} + +func (signer *RamRoleArnSigner) buildCommonRequest() (request *requests.CommonRequest, err error) { + request = requests.NewCommonRequest() + request.Product = "Sts" + request.Version = "2015-04-01" + request.ApiName = "AssumeRole" + request.Scheme = requests.HTTPS + request.QueryParams["RoleArn"] = signer.credential.RoleArn + request.QueryParams["RoleSessionName"] = signer.credential.RoleSessionName + request.QueryParams["DurationSeconds"] = strconv.Itoa(signer.credentialExpiration) + return +} + 
+func (signer *RamRoleArnSigner) refreshApi(request *requests.CommonRequest) (response *responses.CommonResponse, err error) { + credential := &credentials.AccessKeyCredential{ + AccessKeyId: signer.credential.AccessKeyId, + AccessKeySecret: signer.credential.AccessKeySecret, + } + signerV1, err := NewAccessKeySigner(credential) + return signer.commonApi(request, signerV1) +} + +func (signer *RamRoleArnSigner) refreshCredential(response *responses.CommonResponse) (err error) { + if response.GetHttpStatus() != http.StatusOK { + message := "refresh session token failed" + err = errors.NewServerError(response.GetHttpStatus(), response.GetHttpContentString(), message) + return + } + var data interface{} + err = json.Unmarshal(response.GetHttpContentBytes(), &data) + if err != nil { + fmt.Println("refresh RoleArn sts token err, json.Unmarshal fail", err) + return + } + accessKeyId, err := jmespath.Search("Credentials.AccessKeyId", data) + if err != nil { + fmt.Println("refresh RoleArn sts token err, fail to get AccessKeyId", err) + return + } + accessKeySecret, err := jmespath.Search("Credentials.AccessKeySecret", data) + if err != nil { + fmt.Println("refresh RoleArn sts token err, fail to get AccessKeySecret", err) + return + } + securityToken, err := jmespath.Search("Credentials.SecurityToken", data) + if err != nil { + fmt.Println("refresh RoleArn sts token err, fail to get SecurityToken", err) + return + } + if accessKeyId == nil || accessKeySecret == nil || securityToken == nil { + return + } + signer.sessionCredential = &SessionCredential{ + AccessKeyId: accessKeyId.(string), + AccessKeySecret: accessKeySecret.(string), + StsToken: securityToken.(string), + } + return +} + +func (signer *RamRoleArnSigner) GetSessionCredential() *SessionCredential { + return signer.sessionCredential +} + +func (signer *RamRoleArnSigner) Shutdown() { + +} diff --git a/vendor/github.com/aliyun/alibaba-cloud-sdk-go/sdk/auth/signers/signer_sts_token.go b/vendor/github.com/aliyun/alibaba-cloud-sdk-go/sdk/auth/signers/signer_sts_token.go new file mode 100644 index 0000000000..9e178d0fb7 --- /dev/null +++ b/vendor/github.com/aliyun/alibaba-cloud-sdk-go/sdk/auth/signers/signer_sts_token.go @@ -0,0 +1,58 @@ +/* + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package signers + +import ( + "github.com/aliyun/alibaba-cloud-sdk-go/sdk/auth/credentials" +) + +type StsTokenSigner struct { + credential *credentials.StsTokenCredential +} + +func NewStsTokenSigner(credential *credentials.StsTokenCredential) (*StsTokenSigner, error) { + return &StsTokenSigner{ + credential: credential, + }, nil +} + +func (*StsTokenSigner) GetName() string { + return "HMAC-SHA1" +} + +func (*StsTokenSigner) GetType() string { + return "" +} + +func (*StsTokenSigner) GetVersion() string { + return "1.0" +} + +func (signer *StsTokenSigner) GetAccessKeyId() (accessKeyId string, err error) { + return signer.credential.AccessKeyId, nil +} + +func (signer *StsTokenSigner) GetExtraParam() map[string]string { + return map[string]string{"SecurityToken": signer.credential.AccessKeyStsToken} +} + +func (signer *StsTokenSigner) Sign(stringToSign, secretSuffix string) string { + secret := signer.credential.AccessKeySecret + secretSuffix + return ShaHmac1(stringToSign, secret) +} + +func (signer *StsTokenSigner) Shutdown() { + +} diff --git a/vendor/github.com/aliyun/alibaba-cloud-sdk-go/sdk/auth/signers/signer_v2.go b/vendor/github.com/aliyun/alibaba-cloud-sdk-go/sdk/auth/signers/signer_v2.go new file mode 100644 index 0000000000..1814fe7bd1 --- /dev/null +++ b/vendor/github.com/aliyun/alibaba-cloud-sdk-go/sdk/auth/signers/signer_v2.go @@ -0,0 +1,58 @@ +/* + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package signers + +import ( + "github.com/aliyun/alibaba-cloud-sdk-go/sdk/auth/credentials" +) + +type SignerV2 struct { + credential *credentials.RsaKeyPairCredential +} + +func (signer *SignerV2) GetExtraParam() map[string]string { + return nil +} + +func NewSignerV2(credential *credentials.RsaKeyPairCredential) (*SignerV2, error) { + return &SignerV2{ + credential: credential, + }, nil +} + +func (*SignerV2) GetName() string { + return "SHA256withRSA" +} + +func (*SignerV2) GetType() string { + return "PRIVATEKEY" +} + +func (*SignerV2) GetVersion() string { + return "1.0" +} + +func (signer *SignerV2) GetAccessKeyId() (accessKeyId string, err error) { + return signer.credential.PublicKeyId, err +} + +func (signer *SignerV2) Sign(stringToSign, secretSuffix string) string { + secret := signer.credential.PrivateKey + return Sha256WithRsa(stringToSign, secret) +} + +func (signer *SignerV2) Shutdown() { + +} diff --git a/vendor/github.com/aliyun/alibaba-cloud-sdk-go/sdk/client.go b/vendor/github.com/aliyun/alibaba-cloud-sdk-go/sdk/client.go new file mode 100644 index 0000000000..26da462bf9 --- /dev/null +++ b/vendor/github.com/aliyun/alibaba-cloud-sdk-go/sdk/client.go @@ -0,0 +1,417 @@ +/* + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package sdk + +import ( + "fmt" + "github.com/aliyun/alibaba-cloud-sdk-go/sdk/auth" + "github.com/aliyun/alibaba-cloud-sdk-go/sdk/auth/credentials" + "github.com/aliyun/alibaba-cloud-sdk-go/sdk/endpoints" + "github.com/aliyun/alibaba-cloud-sdk-go/sdk/errors" + "github.com/aliyun/alibaba-cloud-sdk-go/sdk/requests" + "github.com/aliyun/alibaba-cloud-sdk-go/sdk/responses" + "net" + "net/http" + "strconv" + "sync" +) + +// this value will be replaced while build: -ldflags="-X sdk.version=x.x.x" +var Version = "0.0.1" + +type Client struct { + regionId string + config *Config + signer auth.Signer + httpClient *http.Client + asyncTaskQueue chan func() + + debug bool + isRunning bool + // void "panic(write to close channel)" cause of addAsync() after Shutdown() + asyncChanLock *sync.RWMutex +} + +func (client *Client) Init() (err error) { + panic("not support yet") +} + +func (client *Client) InitWithOptions(regionId string, config *Config, credential auth.Credential) (err error) { + client.isRunning = true + client.asyncChanLock = new(sync.RWMutex) + client.regionId = regionId + client.config = config + if err != nil { + return + } + client.httpClient = &http.Client{} + + if config.HttpTransport != nil { + client.httpClient.Transport = config.HttpTransport + } + + if config.Timeout > 0 { + client.httpClient.Timeout = config.Timeout + } + + if config.EnableAsync { + client.EnableAsync(config.GoRoutinePoolSize, config.MaxTaskQueueSize) + } + + client.signer, err = auth.NewSignerWithCredential(credential, client.ProcessCommonRequestWithSigner) + + return +} + +func (client *Client) EnableAsync(routinePoolSize, maxTaskQueueSize int) { + client.asyncTaskQueue = make(chan func(), maxTaskQueueSize) + for i := 0; i < routinePoolSize; i++ { + go func() { + for client.isRunning { + select { + case task, notClosed := <-client.asyncTaskQueue: + if notClosed { + task() + } + } + } + }() + } +} + +func (client *Client) InitWithAccessKey(regionId, accessKeyId, accessKeySecret string) (err error) { + config := client.InitClientConfig() + credential := &credentials.BaseCredential{ + AccessKeyId: accessKeyId, + AccessKeySecret: accessKeySecret, + } + return client.InitWithOptions(regionId, config, credential) +} + +func (client *Client) InitWithStsToken(regionId, accessKeyId, accessKeySecret, securityToken string) (err error) { + config := client.InitClientConfig() + credential := &credentials.StsTokenCredential{ + AccessKeyId: accessKeyId, + AccessKeySecret: accessKeySecret, + AccessKeyStsToken: securityToken, + } + return client.InitWithOptions(regionId, config, credential) +} + +func (client *Client) InitWithRamRoleArn(regionId, accessKeyId, accessKeySecret, roleArn, roleSessionName string) (err error) { + config := client.InitClientConfig() + credential := &credentials.RamRoleArnCredential{ + AccessKeyId: accessKeyId, + AccessKeySecret: accessKeySecret, + RoleArn: roleArn, + RoleSessionName: roleSessionName, + } + return client.InitWithOptions(regionId, config, credential) +} + +func (client *Client) InitWithRsaKeyPair(regionId, publicKeyId, privateKey string, sessionExpiration int) 
(err error) { + config := client.InitClientConfig() + credential := &credentials.RsaKeyPairCredential{ + PrivateKey: privateKey, + PublicKeyId: publicKeyId, + SessionExpiration: sessionExpiration, + } + return client.InitWithOptions(regionId, config, credential) +} + +func (client *Client) InitWithEcsRamRole(regionId, roleName string) (err error) { + config := client.InitClientConfig() + credential := &credentials.EcsRamRoleCredential{ + RoleName: roleName, + } + return client.InitWithOptions(regionId, config, credential) +} + +func (client *Client) InitClientConfig() (config *Config) { + if client.config != nil { + return client.config + } else { + return NewConfig() + } +} + +func (client *Client) DoAction(request requests.AcsRequest, response responses.AcsResponse) (err error) { + return client.DoActionWithSigner(request, response, nil) +} + +func (client *Client) BuildRequestWithSigner(request requests.AcsRequest, signer auth.Signer) (err error) { + // add clientVersion + request.GetHeaders()["x-sdk-core-version"] = Version + + regionId := client.regionId + if len(request.GetRegionId()) > 0 { + regionId = request.GetRegionId() + } + + // resolve endpoint + resolveParam := &endpoints.ResolveParam{ + Domain: request.GetDomain(), + Product: request.GetProduct(), + RegionId: regionId, + LocationProduct: request.GetLocationServiceCode(), + LocationEndpointType: request.GetLocationEndpointType(), + CommonApi: client.ProcessCommonRequest, + } + endpoint, err := endpoints.Resolve(resolveParam) + if err != nil { + return + } + request.SetDomain(endpoint) + + // init request params + err = requests.InitParams(request) + if err != nil { + return + } + + // signature + var finalSigner auth.Signer + if signer != nil { + finalSigner = signer + } else { + finalSigner = client.signer + } + httpRequest, err := buildHttpRequest(request, finalSigner, regionId) + if client.config.UserAgent != "" { + httpRequest.Header.Set("User-Agent", client.config.UserAgent) + } + return err +} + +func (client *Client) DoActionWithSigner(request requests.AcsRequest, response responses.AcsResponse, signer auth.Signer) (err error) { + + // add clientVersion + request.GetHeaders()["x-sdk-core-version"] = Version + + regionId := client.regionId + if len(request.GetRegionId()) > 0 { + regionId = request.GetRegionId() + } + + // resolve endpoint + resolveParam := &endpoints.ResolveParam{ + Domain: request.GetDomain(), + Product: request.GetProduct(), + RegionId: regionId, + LocationProduct: request.GetLocationServiceCode(), + LocationEndpointType: request.GetLocationEndpointType(), + CommonApi: client.ProcessCommonRequest, + } + endpoint, err := endpoints.Resolve(resolveParam) + if err != nil { + return + } + request.SetDomain(endpoint) + + if request.GetScheme() == "" { + request.SetScheme(client.config.Scheme) + } + // init request params + err = requests.InitParams(request) + if err != nil { + return + } + + // signature + var finalSigner auth.Signer + if signer != nil { + finalSigner = signer + } else { + finalSigner = client.signer + } + httpRequest, err := buildHttpRequest(request, finalSigner, regionId) + if client.config.UserAgent != "" { + httpRequest.Header.Set("User-Agent", client.config.UserAgent) + } + if err != nil { + return + } + var httpResponse *http.Response + for retryTimes := 0; retryTimes <= client.config.MaxRetryTime; retryTimes++ { + httpResponse, err = client.httpClient.Do(httpRequest) + + var timeout bool + // receive error + if err != nil { + if timeout = isTimeout(err); !timeout { + // if not 
timeout error, return + return + } else if retryTimes >= client.config.MaxRetryTime { + // timeout but reached the max retry times, return + timeoutErrorMsg := fmt.Sprintf(errors.TimeoutErrorMessage, strconv.Itoa(retryTimes+1), strconv.Itoa(retryTimes+1)) + err = errors.NewClientError(errors.TimeoutErrorCode, timeoutErrorMsg, err) + return + } + } + // if status code >= 500 or timeout, will trigger retry + if client.config.AutoRetry && (timeout || isServerError(httpResponse)) { + // rewrite signatureNonce and signature + httpRequest, err = buildHttpRequest(request, finalSigner, regionId) + if err != nil { + return + } + continue + } + break + } + err = responses.Unmarshal(response, httpResponse, request.GetAcceptFormat()) + // wrap server errors + if serverErr, ok := err.(*errors.ServerError); ok { + var wrapInfo = map[string]string{} + wrapInfo["StringToSign"] = request.GetStringToSign() + err = errors.WrapServerError(serverErr, wrapInfo) + } + return +} + +func buildHttpRequest(request requests.AcsRequest, singer auth.Signer, regionId string) (httpRequest *http.Request, err error) { + err = auth.Sign(request, singer, regionId) + if err != nil { + return + } + requestMethod := request.GetMethod() + requestUrl := request.BuildUrl() + body := request.GetBodyReader() + httpRequest, err = http.NewRequest(requestMethod, requestUrl, body) + if err != nil { + return + } + for key, value := range request.GetHeaders() { + httpRequest.Header[key] = []string{value} + } + // host is a special case + if host, containsHost := request.GetHeaders()["Host"]; containsHost { + httpRequest.Host = host + } + return +} + +func isTimeout(err error) bool { + if err == nil { + return false + } + netErr, isNetError := err.(net.Error) + return isNetError && netErr.Timeout() +} + +func isServerError(httpResponse *http.Response) bool { + return httpResponse.StatusCode >= http.StatusInternalServerError +} + +/** +only block when any one of the following occurs: +1. the asyncTaskQueue is full, increase the queue size to avoid this +2. 
Shutdown() in progressing, the client is being closed +**/ +func (client *Client) AddAsyncTask(task func()) (err error) { + if client.asyncTaskQueue != nil { + client.asyncChanLock.RLock() + defer client.asyncChanLock.RUnlock() + if client.isRunning { + client.asyncTaskQueue <- task + } + } else { + err = errors.NewClientError(errors.AsyncFunctionNotEnabledCode, errors.AsyncFunctionNotEnabledMessage, nil) + } + return +} + +func (client *Client) GetConfig() *Config { + return client.config +} + +func NewClient() (client *Client, err error) { + client = &Client{} + err = client.Init() + return +} + +func NewClientWithOptions(regionId string, config *Config, credential auth.Credential) (client *Client, err error) { + client = &Client{} + err = client.InitWithOptions(regionId, config, credential) + return +} + +func NewClientWithAccessKey(regionId, accessKeyId, accessKeySecret string) (client *Client, err error) { + client = &Client{} + err = client.InitWithAccessKey(regionId, accessKeyId, accessKeySecret) + return +} + +func NewClientWithStsToken(regionId, stsAccessKeyId, stsAccessKeySecret, stsToken string) (client *Client, err error) { + client = &Client{} + err = client.InitWithStsToken(regionId, stsAccessKeyId, stsAccessKeySecret, stsToken) + return +} + +func NewClientWithRamRoleArn(regionId string, accessKeyId, accessKeySecret, roleArn, roleSessionName string) (client *Client, err error) { + client = &Client{} + err = client.InitWithRamRoleArn(regionId, accessKeyId, accessKeySecret, roleArn, roleSessionName) + return +} + +func NewClientWithEcsRamRole(regionId string, roleName string) (client *Client, err error) { + client = &Client{} + err = client.InitWithEcsRamRole(regionId, roleName) + return +} + +func NewClientWithRsaKeyPair(regionId string, publicKeyId, privateKey string, sessionExpiration int) (client *Client, err error) { + client = &Client{} + err = client.InitWithRsaKeyPair(regionId, publicKeyId, privateKey, sessionExpiration) + return +} + +// Deprecated: Use NewClientWithRamRoleArn in this package instead. +func NewClientWithStsRoleArn(regionId string, accessKeyId, accessKeySecret, roleArn, roleSessionName string) (client *Client, err error) { + return NewClientWithRamRoleArn(regionId, accessKeyId, accessKeySecret, roleArn, roleSessionName) +} + +// Deprecated: Use NewClientWithEcsRamRole in this package instead. 
+func NewClientWithStsRoleNameOnEcs(regionId string, roleName string) (client *Client, err error) { + return NewClientWithEcsRamRole(regionId, roleName) +} + +func (client *Client) ProcessCommonRequest(request *requests.CommonRequest) (response *responses.CommonResponse, err error) { + request.TransToAcsRequest() + response = responses.NewCommonResponse() + err = client.DoAction(request, response) + return +} + +func (client *Client) ProcessCommonRequestWithSigner(request *requests.CommonRequest, signerInterface interface{}) (response *responses.CommonResponse, err error) { + if signer, isSigner := signerInterface.(auth.Signer); isSigner { + request.TransToAcsRequest() + response = responses.NewCommonResponse() + err = client.DoActionWithSigner(request, response, signer) + return + } else { + panic("should not be here") + } +} + +func (client *Client) Shutdown() { + client.signer.Shutdown() + // lock the addAsync() + client.asyncChanLock.Lock() + defer client.asyncChanLock.Unlock() + client.isRunning = false + close(client.asyncTaskQueue) +} diff --git a/vendor/github.com/aliyun/alibaba-cloud-sdk-go/sdk/config.go b/vendor/github.com/aliyun/alibaba-cloud-sdk-go/sdk/config.go new file mode 100644 index 0000000000..c67c2ad097 --- /dev/null +++ b/vendor/github.com/aliyun/alibaba-cloud-sdk-go/sdk/config.go @@ -0,0 +1,85 @@ +/* + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package sdk + +import ( + "github.com/aliyun/alibaba-cloud-sdk-go/sdk/utils" + "net/http" + "time" +) + +type Config struct { + AutoRetry bool `default:"true"` + MaxRetryTime int `default:"3"` + UserAgent string `default:""` + Debug bool `default:"false"` + Timeout time.Duration `default:"10000000000"` + HttpTransport *http.Transport `default:""` + EnableAsync bool `default:"false"` + MaxTaskQueueSize int `default:"1000"` + GoRoutinePoolSize int `default:"5"` + Scheme string `default:"HTTP"` +} + +func NewConfig() (config *Config) { + config = &Config{} + utils.InitStructWithDefaultTag(config) + return +} + +func (c *Config) WithTimeout(timeout time.Duration) *Config { + c.Timeout = timeout + return c +} + +func (c *Config) WithAutoRetry(isAutoRetry bool) *Config { + c.AutoRetry = isAutoRetry + return c +} + +func (c *Config) WithMaxRetryTime(maxRetryTime int) *Config { + c.MaxRetryTime = maxRetryTime + return c +} + +func (c *Config) WithUserAgent(userAgent string) *Config { + c.UserAgent = userAgent + return c +} + +func (c *Config) WithHttpTransport(httpTransport *http.Transport) *Config { + c.HttpTransport = httpTransport + return c +} + +func (c *Config) WithEnableAsync(isEnableAsync bool) *Config { + c.EnableAsync = isEnableAsync + return c +} + +func (c *Config) WithMaxTaskQueueSize(maxTaskQueueSize int) *Config { + c.MaxTaskQueueSize = maxTaskQueueSize + return c +} + +func (c *Config) WithGoRoutinePoolSize(goRoutinePoolSize int) *Config { + c.GoRoutinePoolSize = goRoutinePoolSize + return c +} + +func (c *Config) WithDebug(isDebug bool) *Config { + c.Debug = isDebug + return c +} diff --git a/vendor/github.com/aliyun/alibaba-cloud-sdk-go/sdk/endpoints/endpoints_config.go b/vendor/github.com/aliyun/alibaba-cloud-sdk-go/sdk/endpoints/endpoints_config.go new file mode 100644 index 0000000000..ec0d56f60b --- /dev/null +++ b/vendor/github.com/aliyun/alibaba-cloud-sdk-go/sdk/endpoints/endpoints_config.go @@ -0,0 +1,505 @@ +package endpoints + +import ( + "encoding/json" + "fmt" + "sync" +) + +const endpointsJson = "{" + + " \"products\":[" + + " {" + + " \"code\": \"aegis\"," + + " \"document_id\": \"28449\"," + + " \"location_service_code\": \"vipaegis\"," + + " \"regional_endpoints\": []," + + " \"global_endpoint\": \"aegis.cn-hangzhou.aliyuncs.com\"," + + " \"regional_endpoint_pattern\": \"\"" + + " }," + + " {" + + " \"code\": \"alidns\"," + + " \"document_id\": \"29739\"," + + " \"location_service_code\": \"alidns\"," + + " \"regional_endpoints\": []," + + " \"global_endpoint\": \"alidns.aliyuncs.com\"," + + " \"regional_endpoint_pattern\": \"\"" + + " }," + + " {" + + " \"code\": \"arms\"," + + " \"document_id\": \"42924\"," + + " \"location_service_code\": \"\"," + + " \"regional_endpoints\": [ {" + + " \"region\": \"ap-southeast-1\"," + + " \"endpoint\": \"arms.ap-southeast-1.aliyuncs.com\"" + + " }, {" + + " \"region\": \"cn-beijing\"," + + " \"endpoint\": \"arms.cn-beijing.aliyuncs.com\"" + + " }, {" + + " \"region\": \"cn-hangzhou\"," + + " \"endpoint\": \"arms.cn-hangzhou.aliyuncs.com\"" + + " }, {" + + " \"region\": \"cn-hongkong\"," + + " \"endpoint\": \"arms.cn-hongkong.aliyuncs.com\"" + + " }, {" + + " \"region\": \"cn-qingdao\"," + + " \"endpoint\": \"arms.cn-qingdao.aliyuncs.com\"" + + " }, {" + + " \"region\": \"cn-shanghai\"," + + " \"endpoint\": \"arms.cn-shanghai.aliyuncs.com\"" + + " }, {" + + " \"region\": \"cn-shenzhen\"," + + " \"endpoint\": \"arms.cn-shenzhen.aliyuncs.com\"" + + " }]," + + " \"global_endpoint\": \"\"," + + " 
\"regional_endpoint_pattern\": \"arms.[RegionId].aliyuncs.com\"" + + " }," + + " {" + + " \"code\": \"batchcompute\"," + + " \"document_id\": \"44717\"," + + " \"location_service_code\": \"\"," + + " \"regional_endpoints\": [ {" + + " \"region\": \"ap-southeast-1\"," + + " \"endpoint\": \"batchcompute.ap-southeast-1.aliyuncs.com\"" + + " }, {" + + " \"region\": \"cn-beijing\"," + + " \"endpoint\": \"batchcompute.cn-beijing.aliyuncs.com\"" + + " }, {" + + " \"region\": \"cn-hangzhou\"," + + " \"endpoint\": \"batchcompute.cn-hangzhou.aliyuncs.com\"" + + " }, {" + + " \"region\": \"cn-huhehaote\"," + + " \"endpoint\": \"batchcompute.cn-huhehaote.aliyuncs.com\"" + + " }, {" + + " \"region\": \"cn-qingdao\"," + + " \"endpoint\": \"batchcompute.cn-qingdao.aliyuncs.com\"" + + " }, {" + + " \"region\": \"cn-shanghai\"," + + " \"endpoint\": \"batchcompute.cn-shanghai.aliyuncs.com\"" + + " }, {" + + " \"region\": \"cn-shenzhen\"," + + " \"endpoint\": \"batchcompute.cn-shenzhen.aliyuncs.com\"" + + " }, {" + + " \"region\": \"cn-zhangjiakou\"," + + " \"endpoint\": \"batchcompute.cn-zhangjiakou.aliyuncs.com\"" + + " }, {" + + " \"region\": \"us-west-1\"," + + " \"endpoint\": \"batchcompute.us-west-1.aliyuncs.com\"" + + " }]," + + " \"global_endpoint\": \"\"," + + " \"regional_endpoint_pattern\": \"batchcompute.[RegionId].aliyuncs.com\"" + + " }," + + " {" + + " \"code\": \"ccc\"," + + " \"document_id\": \"63027\"," + + " \"location_service_code\": \"ccc\"," + + " \"regional_endpoints\": [ {" + + " \"region\": \"cn-hangzhou\"," + + " \"endpoint\": \"ccc.cn-hangzhou.aliyuncs.com\"" + + " }, {" + + " \"region\": \"cn-shanghai\"," + + " \"endpoint\": \"ccc.cn-shanghai.aliyuncs.com\"" + + " }]," + + " \"global_endpoint\": \"\"," + + " \"regional_endpoint_pattern\": \"ccc.[RegionId].aliyuncs.com\"" + + " }," + + " {" + + " \"code\": \"cdn\"," + + " \"document_id\": \"27148\"," + + " \"location_service_code\": \"\"," + + " \"regional_endpoints\": []," + + " \"global_endpoint\": \"cdn.aliyuncs.com\"," + + " \"regional_endpoint_pattern\": \"\"" + + " }," + + " {" + + " \"code\": \"cds\"," + + " \"document_id\": \"62887\"," + + " \"location_service_code\": \"\"," + + " \"regional_endpoints\": []," + + " \"global_endpoint\": \"cds.cn-beijing.aliyuncs.com\"," + + " \"regional_endpoint_pattern\": \"\"" + + " }," + + " {" + + " \"code\": \"chatbot\"," + + " \"document_id\": \"60760\"," + + " \"location_service_code\": \"beebot\"," + + " \"regional_endpoints\": []," + + " \"global_endpoint\": \"\"," + + " \"regional_endpoint_pattern\": \"chatbot.[RegionId].aliyuncs.com\"" + + " }," + + " {" + + " \"code\": \"cloudapi\"," + + " \"document_id\": \"43590\"," + + " \"location_service_code\": \"apigateway\"," + + " \"regional_endpoints\": [ {" + + " \"region\": \"ap-northeast-1\"," + + " \"endpoint\": \"apigateway.ap-northeast-1.aliyuncs.com\"" + + " }, {" + + " \"region\": \"us-west-1\"," + + " \"endpoint\": \"apigateway.us-west-1.aliyuncs.com\"" + + " }]," + + " \"global_endpoint\": \"\"," + + " \"regional_endpoint_pattern\": \"apigateway.[RegionId].aliyuncs.com\"" + + " }," + + " {" + + " \"code\": \"cloudauth\"," + + " \"document_id\": \"60687\"," + + " \"location_service_code\": \"cloudauth\"," + + " \"regional_endpoints\": []," + + " \"global_endpoint\": \"cloudauth.aliyuncs.com\"," + + " \"regional_endpoint_pattern\": \"\"" + + " }," + + " {" + + " \"code\": \"cloudphoto\"," + + " \"document_id\": \"59902\"," + + " \"location_service_code\": \"cloudphoto\"," + + " \"regional_endpoints\": []," + + " 
\"global_endpoint\": \"\"," + + " \"regional_endpoint_pattern\": \"cloudphoto.[RegionId].aliyuncs.com\"" + + " }," + + " {" + + " \"code\": \"cloudwf\"," + + " \"document_id\": \"58111\"," + + " \"location_service_code\": \"\"," + + " \"regional_endpoints\": []," + + " \"global_endpoint\": \"cloudwf.aliyuncs.com\"," + + " \"regional_endpoint_pattern\": \"\"" + + " }," + + " {" + + " \"code\": \"cms\"," + + " \"document_id\": \"28615\"," + + " \"location_service_code\": \"cms\"," + + " \"regional_endpoints\": []," + + " \"global_endpoint\": \"\"," + + " \"regional_endpoint_pattern\": \"\"" + + " }," + + " {" + + " \"code\": \"cr\"," + + " \"document_id\": \"60716\"," + + " \"location_service_code\": \"\"," + + " \"regional_endpoints\": []," + + " \"global_endpoint\": \"cr.aliyuncs.com\"," + + " \"regional_endpoint_pattern\": \"\"" + + " }," + + " {" + + " \"code\": \"cs\"," + + " \"document_id\": \"26043\"," + + " \"location_service_code\": \"\"," + + " \"regional_endpoints\": []," + + " \"global_endpoint\": \"cs.aliyuncs.com\"," + + " \"regional_endpoint_pattern\": \"\"" + + " }," + + " {" + + " \"code\": \"csb\"," + + " \"document_id\": \"64837\"," + + " \"location_service_code\": \"\"," + + " \"regional_endpoints\": [ {" + + " \"region\": \"cn-beijing\"," + + " \"endpoint\": \"csb.cn-beijing.aliyuncs.com\"" + + " }, {" + + " \"region\": \"cn-hangzhou\"," + + " \"endpoint\": \"csb.cn-hangzhou.aliyuncs.com\"" + + " }]," + + " \"global_endpoint\": \"\"," + + " \"regional_endpoint_pattern\": \"csb.[RegionId].aliyuncs.com\"" + + " }," + + " {" + + " \"code\": \"dds\"," + + " \"document_id\": \"61715\"," + + " \"location_service_code\": \"dds\"," + + " \"regional_endpoints\": []," + + " \"global_endpoint\": \"mongodb.aliyuncs.com\"," + + " \"regional_endpoint_pattern\": \"mongodb.[RegionId].aliyuncs.com\"" + + " }," + + " {" + + " \"code\": \"dm\"," + + " \"document_id\": \"29434\"," + + " \"location_service_code\": \"\"," + + " \"regional_endpoints\": [ {" + + " \"region\": \"ap-southeast-1\"," + + " \"endpoint\": \"dm.aliyuncs.com\"" + + " }, {" + + " \"region\": \"ap-southeast-2\"," + + " \"endpoint\": \"dm.ap-southeast-2.aliyuncs.com\"" + + " }, {" + + " \"region\": \"cn-beijing\"," + + " \"endpoint\": \"dm.aliyuncs.com\"" + + " }, {" + + " \"region\": \"cn-hangzhou\"," + + " \"endpoint\": \"dm.aliyuncs.com\"" + + " }, {" + + " \"region\": \"cn-hongkong\"," + + " \"endpoint\": \"dm.aliyuncs.com\"" + + " }, {" + + " \"region\": \"cn-qingdao\"," + + " \"endpoint\": \"dm.aliyuncs.com\"" + + " }, {" + + " \"region\": \"cn-shanghai\"," + + " \"endpoint\": \"dm.aliyuncs.com\"" + + " }, {" + + " \"region\": \"cn-shenzhen\"," + + " \"endpoint\": \"dm.aliyuncs.com\"" + + " }, {" + + " \"region\": \"us-east-1\"," + + " \"endpoint\": \"dm.aliyuncs.com\"" + + " }, {" + + " \"region\": \"us-west-1\"," + + " \"endpoint\": \"dm.aliyuncs.com\"" + + " }]," + + " \"global_endpoint\": \"dm.aliyuncs.com\"," + + " \"regional_endpoint_pattern\": \"dm.[RegionId].aliyuncs.com\"" + + " }," + + " {" + + " \"code\": \"domain\"," + + " \"document_id\": \"42875\"," + + " \"location_service_code\": \"\"," + + " \"regional_endpoints\": []," + + " \"global_endpoint\": \"domain.aliyuncs.com\"," + + " \"regional_endpoint_pattern\": \"domain.aliyuncs.com\"" + + " }," + + " {" + + " \"code\": \"domain-intl\"," + + " \"document_id\": \"\"," + + " \"location_service_code\": \"\"," + + " \"regional_endpoints\": []," + + " \"global_endpoint\": \"domain-intl.aliyuncs.com\"," + + " \"regional_endpoint_pattern\": 
\"domain-intl.aliyuncs.com\"" + + " }," + + " {" + + " \"code\": \"drds\"," + + " \"document_id\": \"51111\"," + + " \"location_service_code\": \"\"," + + " \"regional_endpoints\": []," + + " \"global_endpoint\": \"drds.aliyuncs.com\"," + + " \"regional_endpoint_pattern\": \"drds.aliyuncs.com\"" + + " }," + + " {" + + " \"code\": \"ecs\"," + + " \"document_id\": \"25484\"," + + " \"location_service_code\": \"ecs\"," + + " \"regional_endpoints\": []," + + " \"global_endpoint\": \"\"," + + " \"regional_endpoint_pattern\": \"\"" + + " }," + + " {" + + " \"code\": \"emr\"," + + " \"document_id\": \"28140\"," + + " \"location_service_code\": \"emr\"," + + " \"regional_endpoints\": []," + + " \"global_endpoint\": \"\"," + + " \"regional_endpoint_pattern\": \"emr.[RegionId].aliyuncs.com\"" + + " }," + + " {" + + " \"code\": \"ess\"," + + " \"document_id\": \"25925\"," + + " \"location_service_code\": \"ess\"," + + " \"regional_endpoints\": []," + + " \"global_endpoint\": \"\"," + + " \"regional_endpoint_pattern\": \"ess.[RegionId].aliyuncs.com\"" + + " }," + + " {" + + " \"code\": \"green\"," + + " \"document_id\": \"28427\"," + + " \"location_service_code\": \"green\"," + + " \"regional_endpoints\": []," + + " \"global_endpoint\": \"green.aliyuncs.com\"," + + " \"regional_endpoint_pattern\": \"\"" + + " }," + + " {" + + " \"code\": \"hpc\"," + + " \"document_id\": \"35201\"," + + " \"location_service_code\": \"hpc\"," + + " \"regional_endpoints\": []," + + " \"global_endpoint\": \"hpc.aliyuncs.com\"," + + " \"regional_endpoint_pattern\": \"\"" + + " }," + + " {" + + " \"code\": \"httpdns\"," + + " \"document_id\": \"52679\"," + + " \"location_service_code\": \"\"," + + " \"regional_endpoints\": []," + + " \"global_endpoint\": \"httpdns-api.aliyuncs.com\"," + + " \"regional_endpoint_pattern\": \"\"" + + " }," + + " {" + + " \"code\": \"iot\"," + + " \"document_id\": \"30557\"," + + " \"location_service_code\": \"iot\"," + + " \"regional_endpoints\": []," + + " \"global_endpoint\": \"\"," + + " \"regional_endpoint_pattern\": \"iot.[RegionId].aliyuncs.com\"" + + " }," + + " {" + + " \"code\": \"itaas\"," + + " \"document_id\": \"55759\"," + + " \"location_service_code\": \"\"," + + " \"regional_endpoints\": []," + + " \"global_endpoint\": \"itaas.aliyuncs.com\"," + + " \"regional_endpoint_pattern\": \"\"" + + " }," + + " {" + + " \"code\": \"jaq\"," + + " \"document_id\": \"35037\"," + + " \"location_service_code\": \"\"," + + " \"regional_endpoints\": []," + + " \"global_endpoint\": \"jaq.aliyuncs.com\"," + + " \"regional_endpoint_pattern\": \"\"" + + " }," + + " {" + + " \"code\": \"live\"," + + " \"document_id\": \"48207\"," + + " \"location_service_code\": \"live\"," + + " \"regional_endpoints\": []," + + " \"global_endpoint\": \"live.aliyuncs.com\"," + + " \"regional_endpoint_pattern\": \"\"" + + " }," + + " {" + + " \"code\": \"mts\"," + + " \"document_id\": \"29212\"," + + " \"location_service_code\": \"mts\"," + + " \"regional_endpoints\": []," + + " \"global_endpoint\": \"\"," + + " \"regional_endpoint_pattern\": \"\"" + + " }," + + " {" + + " \"code\": \"nas\"," + + " \"document_id\": \"62598\"," + + " \"location_service_code\": \"nas\"," + + " \"regional_endpoints\": []," + + " \"global_endpoint\": \"\"," + + " \"regional_endpoint_pattern\": \"\"" + + " }," + + " {" + + " \"code\": \"ons\"," + + " \"document_id\": \"44416\"," + + " \"location_service_code\": \"ons\"," + + " \"regional_endpoints\": []," + + " \"global_endpoint\": \"\"," + + " \"regional_endpoint_pattern\": \"\"" + + " }," 
+ + " {" + + " \"code\": \"polardb\"," + + " \"document_id\": \"58764\"," + + " \"location_service_code\": \"polardb\"," + + " \"regional_endpoints\": [ {" + + " \"region\": \"ap-south-1\"," + + " \"endpoint\": \"polardb.ap-south-1.aliyuncs.com\"" + + " }, {" + + " \"region\": \"ap-southeast-5\"," + + " \"endpoint\": \"polardb.ap-southeast-5.aliyuncs.com\"" + + " }]," + + " \"global_endpoint\": \"\"," + + " \"regional_endpoint_pattern\": \"polardb.aliyuncs.com\"" + + " }," + + " {" + + " \"code\": \"push\"," + + " \"document_id\": \"30074\"," + + " \"location_service_code\": \"\"," + + " \"regional_endpoints\": []," + + " \"global_endpoint\": \"cloudpush.aliyuncs.com\"," + + " \"regional_endpoint_pattern\": \"\"" + + " }," + + " {" + + " \"code\": \"qualitycheck\"," + + " \"document_id\": \"50807\"," + + " \"location_service_code\": \"\"," + + " \"regional_endpoints\": [ {" + + " \"region\": \"cn-hangzhou\"," + + " \"endpoint\": \"qualitycheck.cn-hangzhou.aliyuncs.com\"" + + " }]," + + " \"global_endpoint\": \"\"," + + " \"regional_endpoint_pattern\": \"\"" + + " }," + + " {" + + " \"code\": \"r-kvstore\"," + + " \"document_id\": \"60831\"," + + " \"location_service_code\": \"redisa\"," + + " \"regional_endpoints\": []," + + " \"global_endpoint\": \"\"," + + " \"regional_endpoint_pattern\": \"\"" + + " }," + + " {" + + " \"code\": \"ram\"," + + " \"document_id\": \"28672\"," + + " \"location_service_code\": \"\"," + + " \"regional_endpoints\": []," + + " \"global_endpoint\": \"ram.aliyuncs.com\"," + + " \"regional_endpoint_pattern\": \"\"" + + " }," + + " {" + + " \"code\": \"rds\"," + + " \"document_id\": \"26223\"," + + " \"location_service_code\": \"rds\"," + + " \"regional_endpoints\": []," + + " \"global_endpoint\": \"\"," + + " \"regional_endpoint_pattern\": \"\"" + + " }," + + " {" + + " \"code\": \"ros\"," + + " \"document_id\": \"28899\"," + + " \"location_service_code\": \"\"," + + " \"regional_endpoints\": []," + + " \"global_endpoint\": \"ros.aliyuncs.com\"," + + " \"regional_endpoint_pattern\": \"\"" + + " }," + + " {" + + " \"code\": \"sas-api\"," + + " \"document_id\": \"28498\"," + + " \"location_service_code\": \"sas\"," + + " \"regional_endpoints\": []," + + " \"global_endpoint\": \"\"," + + " \"regional_endpoint_pattern\": \"\"" + + " }," + + " {" + + " \"code\": \"slb\"," + + " \"document_id\": \"27565\"," + + " \"location_service_code\": \"slb\"," + + " \"regional_endpoints\": []," + + " \"global_endpoint\": \"\"," + + " \"regional_endpoint_pattern\": \"\"" + + " }," + + " {" + + " \"code\": \"sts\"," + + " \"document_id\": \"28756\"," + + " \"location_service_code\": \"\"," + + " \"regional_endpoints\": []," + + " \"global_endpoint\": \"sts.aliyuncs.com\"," + + " \"regional_endpoint_pattern\": \"\"" + + " }," + + " {" + + " \"code\": \"vod\"," + + " \"document_id\": \"60574\"," + + " \"location_service_code\": \"vod\"," + + " \"regional_endpoints\": []," + + " \"global_endpoint\": \"\"," + + " \"regional_endpoint_pattern\": \"\"" + + " }," + + " {" + + " \"code\": \"vpc\"," + + " \"document_id\": \"34962\"," + + " \"location_service_code\": \"vpc\"," + + " \"regional_endpoints\": []," + + " \"global_endpoint\": \"\"," + + " \"regional_endpoint_pattern\": \"\"" + + " }," + + " {" + + " \"code\": \"waf\"," + + " \"document_id\": \"62847\"," + + " \"location_service_code\": \"waf\"," + + " \"regional_endpoints\": []," + + " \"global_endpoint\": \"\"," + + " \"regional_endpoint_pattern\": \"\"" + + " }]" + + "}" + +var initOnce sync.Once +var data interface{} + +func 
getEndpointConfigData() interface{} { + initOnce.Do(func() { + err := json.Unmarshal([]byte(endpointsJson), &data) + if err != nil { + fmt.Println("init endpoint config data failed.", err) + } + }) + return data +} diff --git a/vendor/github.com/aliyun/alibaba-cloud-sdk-go/sdk/endpoints/local_global_resolver.go b/vendor/github.com/aliyun/alibaba-cloud-sdk-go/sdk/endpoints/local_global_resolver.go new file mode 100644 index 0000000000..864b7cc4c2 --- /dev/null +++ b/vendor/github.com/aliyun/alibaba-cloud-sdk-go/sdk/endpoints/local_global_resolver.go @@ -0,0 +1,37 @@ +/* + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package endpoints + +import ( + "fmt" + "github.com/jmespath/go-jmespath" + "strings" +) + +type LocalGlobalResolver struct { +} + +func (resolver *LocalGlobalResolver) TryResolve(param *ResolveParam) (endpoint string, support bool, err error) { + // get the global endpoints configs + endpointExpression := fmt.Sprintf("products[?code=='%s'].global_endpoint", strings.ToLower(param.Product)) + endpointData, err := jmespath.Search(endpointExpression, getEndpointConfigData()) + if err == nil && endpointData != nil && len(endpointData.([]interface{})) > 0 { + endpoint = endpointData.([]interface{})[0].(string) + support = len(endpoint) > 0 + return endpoint, support, nil + } + support = false + return +} diff --git a/vendor/github.com/aliyun/alibaba-cloud-sdk-go/sdk/endpoints/local_regional_resolver.go b/vendor/github.com/aliyun/alibaba-cloud-sdk-go/sdk/endpoints/local_regional_resolver.go new file mode 100644 index 0000000000..f8aa0f92a7 --- /dev/null +++ b/vendor/github.com/aliyun/alibaba-cloud-sdk-go/sdk/endpoints/local_regional_resolver.go @@ -0,0 +1,41 @@ +/* + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package endpoints + +import ( + "fmt" + "github.com/jmespath/go-jmespath" + "strings" +) + +type LocalRegionalResolver struct { +} + +func (resolver *LocalRegionalResolver) TryResolve(param *ResolveParam) (endpoint string, support bool, err error) { + // get the regional endpoints configs + regionalExpression := fmt.Sprintf("products[?code=='%s'].regional_endpoints", strings.ToLower(param.Product)) + regionalData, err := jmespath.Search(regionalExpression, getEndpointConfigData()) + if err == nil && regionalData != nil && len(regionalData.([]interface{})) > 0 { + endpointExpression := fmt.Sprintf("[0][?region=='%s'].endpoint", strings.ToLower(param.RegionId)) + endpointData, err := jmespath.Search(endpointExpression, regionalData) + if err == nil && endpointData != nil && len(endpointData.([]interface{})) > 0 { + endpoint = endpointData.([]interface{})[0].(string) + support = len(endpoint) > 0 + return endpoint, support, nil + } + } + support = false + return +} diff --git a/vendor/github.com/aliyun/alibaba-cloud-sdk-go/sdk/endpoints/location_resolver.go b/vendor/github.com/aliyun/alibaba-cloud-sdk-go/sdk/endpoints/location_resolver.go new file mode 100644 index 0000000000..803bc57d5a --- /dev/null +++ b/vendor/github.com/aliyun/alibaba-cloud-sdk-go/sdk/endpoints/location_resolver.go @@ -0,0 +1,139 @@ +/* + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
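LocalRegionalResolver performs the same style of lookup in two stages: first products[?code=='<product>'].regional_endpoints, then [0][?region=='<region>'].endpoint over that intermediate result. A short usage sketch; the product/region pair is taken from the embedded config shown earlier, so, assuming that data stays as-is, the call should resolve locally without any network round trip:

package main

import (
	"fmt"

	"github.com/aliyun/alibaba-cloud-sdk-go/sdk/endpoints"
)

func main() {
	resolver := &endpoints.LocalRegionalResolver{}
	// "polardb" / "ap-south-1" appear in the embedded endpoints JSON above.
	endpoint, ok, err := resolver.TryResolve(&endpoints.ResolveParam{
		Product:  "polardb",
		RegionId: "ap-south-1",
	})
	fmt.Println(endpoint, ok, err) // expected: polardb.ap-south-1.aliyuncs.com true <nil>
}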
+ */ +package endpoints + +import ( + "encoding/json" + "github.com/aliyun/alibaba-cloud-sdk-go/sdk/requests" + "sync" + "time" +) + +const ( + EndpointCacheExpireTime = 3600 //Seconds +) + +var lastClearTimePerProduct = struct { + sync.RWMutex + cache map[string]int64 +}{cache: make(map[string]int64)} + +var endpointCache = struct { + sync.RWMutex + cache map[string]string +}{cache: make(map[string]string)} + +type LocationResolver struct { +} + +func (resolver *LocationResolver) TryResolve(param *ResolveParam) (endpoint string, support bool, err error) { + if len(param.LocationProduct) <= 0 { + support = false + return + } + + //get from cache + cacheKey := param.Product + "#" + param.RegionId + if endpointCache.cache != nil && len(endpointCache.cache[cacheKey]) > 0 && !CheckCacheIsExpire(cacheKey) { + endpoint = endpointCache.cache[cacheKey] + support = true + return + } + + //get from remote + getEndpointRequest := requests.NewCommonRequest() + + getEndpointRequest.Product = "Location" + getEndpointRequest.Version = "2015-06-12" + getEndpointRequest.ApiName = "DescribeEndpoints" + getEndpointRequest.Domain = "location.aliyuncs.com" + getEndpointRequest.Method = "GET" + getEndpointRequest.Scheme = requests.HTTPS + + getEndpointRequest.QueryParams["Id"] = param.RegionId + getEndpointRequest.QueryParams["ServiceCode"] = param.LocationProduct + if len(param.LocationEndpointType) > 0 { + getEndpointRequest.QueryParams["Type"] = param.LocationEndpointType + } else { + getEndpointRequest.QueryParams["Type"] = "openAPI" + } + + response, err := param.CommonApi(getEndpointRequest) + var getEndpointResponse GetEndpointResponse + if !response.IsSuccess() { + support = false + return + } + + json.Unmarshal([]byte(response.GetHttpContentString()), &getEndpointResponse) + if !getEndpointResponse.Success || getEndpointResponse.Endpoints == nil { + support = false + return + } + if len(getEndpointResponse.Endpoints.Endpoint) <= 0 { + support = false + return + } + if len(getEndpointResponse.Endpoints.Endpoint[0].Endpoint) > 0 { + endpoint = getEndpointResponse.Endpoints.Endpoint[0].Endpoint + endpointCache.Lock() + endpointCache.cache[cacheKey] = endpoint + endpointCache.Unlock() + lastClearTimePerProduct.Lock() + lastClearTimePerProduct.cache[cacheKey] = time.Now().Unix() + lastClearTimePerProduct.Unlock() + support = true + return + } + + support = false + return +} + +func CheckCacheIsExpire(cacheKey string) bool { + lastClearTime := lastClearTimePerProduct.cache[cacheKey] + if lastClearTime <= 0 { + lastClearTime = time.Now().Unix() + lastClearTimePerProduct.Lock() + lastClearTimePerProduct.cache[cacheKey] = lastClearTime + lastClearTimePerProduct.Unlock() + } + + now := time.Now().Unix() + elapsedTime := now - lastClearTime + if elapsedTime > EndpointCacheExpireTime { + return true + } + + return false +} + +type GetEndpointResponse struct { + Endpoints *EndpointsObj + RequestId string + Success bool +} + +type EndpointsObj struct { + Endpoint []EndpointObj +} + +type EndpointObj struct { + Protocols map[string]string + Type string + Namespace string + Id string + SerivceCode string + Endpoint string +} diff --git a/vendor/github.com/aliyun/alibaba-cloud-sdk-go/sdk/endpoints/mapping_resolver.go b/vendor/github.com/aliyun/alibaba-cloud-sdk-go/sdk/endpoints/mapping_resolver.go new file mode 100644 index 0000000000..f7b5a1aa23 --- /dev/null +++ b/vendor/github.com/aliyun/alibaba-cloud-sdk-go/sdk/endpoints/mapping_resolver.go @@ -0,0 +1,39 @@ +/* + * Licensed under the Apache License, Version 2.0 
(the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package endpoints + +import ( + "fmt" + "strings" +) + +const keyFormatter = "%s::%s" + +var endpointMapping = make(map[string]string) + +func AddEndpointMapping(regionId, productId, endpoint string) (err error) { + key := fmt.Sprintf(keyFormatter, strings.ToLower(regionId), strings.ToLower(productId)) + endpointMapping[key] = endpoint + return nil +} + +type MappingResolver struct { +} + +func (resolver *MappingResolver) TryResolve(param *ResolveParam) (endpoint string, support bool, err error) { + key := fmt.Sprintf(keyFormatter, strings.ToLower(param.RegionId), strings.ToLower(param.Product)) + endpoint, contains := endpointMapping[key] + return endpoint, contains, nil +} diff --git a/vendor/github.com/aliyun/alibaba-cloud-sdk-go/sdk/endpoints/resolver.go b/vendor/github.com/aliyun/alibaba-cloud-sdk-go/sdk/endpoints/resolver.go new file mode 100644 index 0000000000..e58cfbf283 --- /dev/null +++ b/vendor/github.com/aliyun/alibaba-cloud-sdk-go/sdk/endpoints/resolver.go @@ -0,0 +1,80 @@ +/* + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package endpoints + +import ( + "encoding/json" + "fmt" + "github.com/aliyun/alibaba-cloud-sdk-go/sdk/errors" + "github.com/aliyun/alibaba-cloud-sdk-go/sdk/requests" + "github.com/aliyun/alibaba-cloud-sdk-go/sdk/responses" + "sync" +) + +const ( + ResolveEndpointUserGuideLink = "" +) + +var once sync.Once +var resolvers []Resolver + +type Resolver interface { + TryResolve(param *ResolveParam) (endpoint string, support bool, err error) +} + +func Resolve(param *ResolveParam) (endpoint string, err error) { + supportedResolvers := getAllResolvers() + for _, resolver := range supportedResolvers { + endpoint, supported, err := resolver.TryResolve(param) + if supported { + return endpoint, err + } + } + + // not support + errorMsg := fmt.Sprintf(errors.CanNotResolveEndpointErrorMessage, param, ResolveEndpointUserGuideLink) + err = errors.NewClientError(errors.CanNotResolveEndpointErrorCode, errorMsg, nil) + return +} + +func getAllResolvers() []Resolver { + once.Do(func() { + resolvers = []Resolver{ + &SimpleHostResolver{}, + &MappingResolver{}, + &LocationResolver{}, + &LocalRegionalResolver{}, + &LocalGlobalResolver{}, + } + }) + return resolvers +} + +type ResolveParam struct { + Domain string + Product string + RegionId string + LocationProduct string + LocationEndpointType string + CommonApi func(request *requests.CommonRequest) (response *responses.CommonResponse, err error) `json:"-"` +} + +func (param *ResolveParam) String() string { + jsonBytes, err := json.Marshal(param) + if err != nil { + return fmt.Sprint("ResolveParam.String() process error:", err) + } + return string(jsonBytes) +} diff --git a/vendor/github.com/aliyun/alibaba-cloud-sdk-go/sdk/endpoints/simple_host_resolver.go b/vendor/github.com/aliyun/alibaba-cloud-sdk-go/sdk/endpoints/simple_host_resolver.go new file mode 100644 index 0000000000..3e2e731ea1 --- /dev/null +++ b/vendor/github.com/aliyun/alibaba-cloud-sdk-go/sdk/endpoints/simple_host_resolver.go @@ -0,0 +1,25 @@ +/* + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package endpoints + +type SimpleHostResolver struct { +} + +func (resolver *SimpleHostResolver) TryResolve(param *ResolveParam) (endpoint string, support bool, err error) { + if support = len(param.Domain) > 0; support { + endpoint = param.Domain + } + return +} diff --git a/vendor/github.com/aliyun/alibaba-cloud-sdk-go/sdk/errors/client_error.go b/vendor/github.com/aliyun/alibaba-cloud-sdk-go/sdk/errors/client_error.go new file mode 100644 index 0000000000..88ebc1564d --- /dev/null +++ b/vendor/github.com/aliyun/alibaba-cloud-sdk-go/sdk/errors/client_error.go @@ -0,0 +1,92 @@ +/* + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
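Resolve walks the resolver chain in a fixed order (simple host, mapping, location service, local regional, local global) and returns the first supported answer. A ResolveParam with Domain set therefore short-circuits at SimpleHostResolver, while one that only carries Product and RegionId falls through to the embedded endpoint data; the location service is consulted only when LocationProduct is set, since that path needs the CommonApi callback. A sketch with illustrative values:

package main

import (
	"fmt"

	"github.com/aliyun/alibaba-cloud-sdk-go/sdk/endpoints"
)

func main() {
	// An explicit domain wins immediately via SimpleHostResolver.
	ep, err := endpoints.Resolve(&endpoints.ResolveParam{Domain: "sts.aliyuncs.com"})
	fmt.Println(ep, err) // sts.aliyuncs.com <nil>

	// No domain and no LocationProduct: the chain falls through to the local
	// endpoint data, which (per the embedded config above) lists a global
	// endpoint for "ram".
	ep, err = endpoints.Resolve(&endpoints.ResolveParam{Product: "ram", RegionId: "cn-hangzhou"})
	fmt.Println(ep, err) // expected: ram.aliyuncs.com <nil>
}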
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package errors + +import "fmt" + +const ( + DefaultClientErrorStatus = 400 + DefaultClientErrorCode = "SDK.ClientError" + + UnsupportedCredentialErrorCode = "SDK.UnsupportedCredential" + UnsupportedCredentialErrorMessage = "Specified credential (type = %s) is not supported, please check" + + CanNotResolveEndpointErrorCode = "SDK.CanNotResolveEndpoint" + CanNotResolveEndpointErrorMessage = "Can not resolve endpoint(param = %s), please check the user guide\n %s" + + UnsupportedParamPositionErrorCode = "SDK.UnsupportedParamPosition" + UnsupportedParamPositionErrorMessage = "Specified param position (%s) is not supported, please upgrade sdk and retry" + + AsyncFunctionNotEnabledCode = "SDK.AsyncFunctionNotEnabled" + AsyncFunctionNotEnabledMessage = "Async function is not enabled in client, please invoke 'client.EnableAsync' function" + + UnknownRequestTypeErrorCode = "SDK.UnknownRequestType" + UnknownRequestTypeErrorMessage = "Unknown Request Type: %s" + + MissingParamErrorCode = "SDK.MissingParam" + InvalidParamErrorCode = "SDK.InvalidParam" + + JsonUnmarshalErrorCode = "SDK.JsonUnmarshalError" + JsonUnmarshalErrorMessage = "Failed to unmarshal response, but you can get the data via response.GetHttpStatusCode() and response.GetHttpContentString()" + + TimeoutErrorCode = "SDK.TimeoutError" + TimeoutErrorMessage = "The request timed out %s times(%s for retry), perhaps we should have the threshold raised a little?" +) + +type ClientError struct { + errorCode string + message string + originError error +} + +func NewClientError(errorCode, message string, originErr error) Error { + return &ClientError{ + errorCode: errorCode, + message: message, + originError: originErr, + } +} + +func (err *ClientError) Error() string { + clientErrMsg := fmt.Sprintf("[%s] %s", err.errorCode, err.message) + if err.originError != nil { + return clientErrMsg + "\ncaused by:\n" + err.originError.Error() + } + return clientErrMsg +} + +func (err *ClientError) OriginError() error { + return err.originError +} + +func (*ClientError) HttpStatus() int { + return DefaultClientErrorStatus +} + +func (err *ClientError) ErrorCode() string { + if err.errorCode == "" { + return DefaultClientErrorCode + } else { + return err.errorCode + } +} + +func (err *ClientError) Message() string { + return err.message +} + +func (err *ClientError) String() string { + return err.Error() +} diff --git a/vendor/github.com/aliyun/alibaba-cloud-sdk-go/sdk/errors/error.go b/vendor/github.com/aliyun/alibaba-cloud-sdk-go/sdk/errors/error.go new file mode 100644 index 0000000000..49962f3b5e --- /dev/null +++ b/vendor/github.com/aliyun/alibaba-cloud-sdk-go/sdk/errors/error.go @@ -0,0 +1,23 @@ +/* + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
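ClientError wraps failures raised before any server response exists; NewClientError returns the package's Error interface (defined in error.go just below), and Error() prefixes the code and appends the wrapped cause when one is present. A minimal sketch, with an invented cause and message:

package main

import (
	"fmt"

	"github.com/aliyun/alibaba-cloud-sdk-go/sdk/errors"
)

func main() {
	cause := fmt.Errorf("dial tcp: connection refused")
	err := errors.NewClientError(errors.TimeoutErrorCode, "request to the endpoint timed out", cause)

	fmt.Println(err.ErrorCode())  // SDK.TimeoutError
	fmt.Println(err.HttpStatus()) // 400 (DefaultClientErrorStatus)
	// Error() renders "[SDK.TimeoutError] request to the endpoint timed out",
	// followed by "caused by:" and the wrapped cause.
	fmt.Println(err.Error())
}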
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package errors + +type Error interface { + error + HttpStatus() int + ErrorCode() string + Message() string + OriginError() error +} diff --git a/vendor/github.com/aliyun/alibaba-cloud-sdk-go/sdk/errors/server_error.go b/vendor/github.com/aliyun/alibaba-cloud-sdk-go/sdk/errors/server_error.go new file mode 100644 index 0000000000..d90638c0a6 --- /dev/null +++ b/vendor/github.com/aliyun/alibaba-cloud-sdk-go/sdk/errors/server_error.go @@ -0,0 +1,122 @@ +/* + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package errors + +import ( + "encoding/json" + "fmt" + "github.com/jmespath/go-jmespath" +) + +var wrapperList = []ServerErrorWrapper{ + &SignatureDostNotMatchWrapper{}, +} + +type ServerError struct { + httpStatus int + requestId string + hostId string + errorCode string + recommend string + message string + comment string +} + +type ServerErrorWrapper interface { + tryWrap(error *ServerError, wrapInfo map[string]string) (bool, *ServerError) +} + +func (err *ServerError) Error() string { + return fmt.Sprintf("SDK.ServerError\nErrorCode: %s\nRecommend: %s\nRequestId: %s\nMessage: %s", + err.errorCode, err.comment+err.recommend, err.requestId, err.message) +} + +func NewServerError(httpStatus int, responseContent, comment string) Error { + result := &ServerError{ + httpStatus: httpStatus, + message: responseContent, + comment: comment, + } + + var data interface{} + err := json.Unmarshal([]byte(responseContent), &data) + if err == nil { + requestId, _ := jmespath.Search("RequestId", data) + hostId, _ := jmespath.Search("HostId", data) + errorCode, _ := jmespath.Search("Code", data) + recommend, _ := jmespath.Search("Recommend", data) + message, _ := jmespath.Search("Message", data) + + if requestId != nil { + result.requestId = requestId.(string) + } + if hostId != nil { + result.hostId = hostId.(string) + } + if errorCode != nil { + result.errorCode = errorCode.(string) + } + if recommend != nil { + result.recommend = recommend.(string) + } + if message != nil { + result.message = message.(string) + } + } + + return result +} + +func WrapServerError(originError *ServerError, wrapInfo map[string]string) *ServerError { + for _, wrapper := range wrapperList { + ok, newError := wrapper.tryWrap(originError, wrapInfo) + if ok { + return newError + } + } + return originError +} + +func (err *ServerError) HttpStatus() int { + return err.httpStatus +} + +func (err *ServerError) ErrorCode() string { + return err.errorCode +} + +func (err *ServerError) Message() string { + return err.message +} + +func (err *ServerError) OriginError() error { + return nil +} + +func 
(err *ServerError) HostId() string { + return err.hostId +} + +func (err *ServerError) RequestId() string { + return err.requestId +} + +func (err *ServerError) Recommend() string { + return err.recommend +} + +func (err *ServerError) Comment() string { + return err.comment +} diff --git a/vendor/github.com/aliyun/alibaba-cloud-sdk-go/sdk/errors/signature_does_not_match_wrapper.go b/vendor/github.com/aliyun/alibaba-cloud-sdk-go/sdk/errors/signature_does_not_match_wrapper.go new file mode 100644 index 0000000000..33b3e4c446 --- /dev/null +++ b/vendor/github.com/aliyun/alibaba-cloud-sdk-go/sdk/errors/signature_does_not_match_wrapper.go @@ -0,0 +1,29 @@ +package errors + +import "strings" + +const SignatureDostNotMatchErrorCode = "SignatureDoesNotMatch" +const MessagePrefix = "Specified signature is not matched with our calculation. server string to sign is:" + +type SignatureDostNotMatchWrapper struct { +} + +func (*SignatureDostNotMatchWrapper) tryWrap(error *ServerError, wrapInfo map[string]string) (bool, *ServerError) { + clientStringToSign := wrapInfo["StringToSign"] + if error.errorCode == SignatureDostNotMatchErrorCode && clientStringToSign != "" { + message := error.message + if strings.HasPrefix(message, MessagePrefix) { + serverStringToSign := message[len(MessagePrefix):] + if clientStringToSign == serverStringToSign { + // user secret is error + error.recommend = "Please check you AccessKeySecret" + } else { + error.recommend = "This may be a bug with the SDK and we hope you can submit this question in the " + + "github issue(https://github.com/aliyun/alibaba-cloud-sdk-go/issues), thanks very much" + } + } + return true, error + } else { + return false, nil + } +} diff --git a/vendor/github.com/aliyun/alibaba-cloud-sdk-go/sdk/requests/acs_reqeust.go b/vendor/github.com/aliyun/alibaba-cloud-sdk-go/sdk/requests/acs_reqeust.go new file mode 100644 index 0000000000..9ed9c3e687 --- /dev/null +++ b/vendor/github.com/aliyun/alibaba-cloud-sdk-go/sdk/requests/acs_reqeust.go @@ -0,0 +1,308 @@ +/* + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
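NewServerError parses the service's JSON error body with jmespath, so the code, request ID and recommendation come straight out of the payload, and the wrapper list then gets a chance to enrich specific cases such as signature mismatches. A sketch; the response body below is fabricated for illustration:

package main

import (
	"fmt"

	"github.com/aliyun/alibaba-cloud-sdk-go/sdk/errors"
)

func main() {
	body := `{"RequestId":"ABCD-1234","Code":"InvalidParameter","Message":"RegionId is invalid"}`
	err := errors.NewServerError(400, body, "")

	fmt.Println(err.HttpStatus(), err.ErrorCode(), err.Message())
	// Fields like RequestId are only reachable on the concrete type.
	if serverErr, ok := err.(*errors.ServerError); ok {
		fmt.Println(serverErr.RequestId()) // ABCD-1234
	}
}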
+ */ + +package requests + +import ( + "fmt" + "github.com/aliyun/alibaba-cloud-sdk-go/sdk/errors" + "io" + "reflect" + "strconv" +) + +const ( + RPC = "RPC" + ROA = "ROA" + + HTTP = "HTTP" + HTTPS = "HTTPS" + + DefaultHttpPort = "80" + + GET = "GET" + PUT = "PUT" + POST = "POST" + DELETE = "DELETE" + HEAD = "HEAD" + OPTIONS = "OPTIONS" + + Json = "application/json" + Xml = "application/xml" + Raw = "application/octet-stream" + Form = "application/x-www-form-urlencoded" + + Header = "Header" + Query = "Query" + Body = "Body" + Path = "Path" + + HeaderSeparator = "\n" +) + +// interface +type AcsRequest interface { + GetScheme() string + GetMethod() string + GetDomain() string + GetPort() string + GetRegionId() string + GetUrl() string + GetQueries() string + GetHeaders() map[string]string + GetQueryParams() map[string]string + GetFormParams() map[string]string + GetContent() []byte + GetBodyReader() io.Reader + GetStyle() string + GetProduct() string + GetVersion() string + GetActionName() string + GetAcceptFormat() string + GetLocationServiceCode() string + GetLocationEndpointType() string + + SetStringToSign(stringToSign string) + GetStringToSign() string + + SetDomain(domain string) + SetContent(content []byte) + SetScheme(scheme string) + BuildUrl() string + BuildQueries() string + + addHeaderParam(key, value string) + addQueryParam(key, value string) + addFormParam(key, value string) + addPathParam(key, value string) +} + +// base class +type baseRequest struct { + Scheme string + Method string + Domain string + Port string + RegionId string + + product string + version string + + actionName string + + AcceptFormat string + + QueryParams map[string]string + Headers map[string]string + FormParams map[string]string + Content []byte + + locationServiceCode string + locationEndpointType string + + queries string + + stringToSign string +} + +func (request *baseRequest) GetQueryParams() map[string]string { + return request.QueryParams +} + +func (request *baseRequest) GetFormParams() map[string]string { + return request.FormParams +} + +func (request *baseRequest) GetContent() []byte { + return request.Content +} + +func (request *baseRequest) GetVersion() string { + return request.version +} + +func (request *baseRequest) GetActionName() string { + return request.actionName +} + +func (request *baseRequest) SetContent(content []byte) { + request.Content = content +} + +func (request *baseRequest) addHeaderParam(key, value string) { + request.Headers[key] = value +} + +func (request *baseRequest) addQueryParam(key, value string) { + request.QueryParams[key] = value +} + +func (request *baseRequest) addFormParam(key, value string) { + request.FormParams[key] = value +} + +func (request *baseRequest) GetAcceptFormat() string { + return request.AcceptFormat +} + +func (request *baseRequest) GetLocationServiceCode() string { + return request.locationServiceCode +} + +func (request *baseRequest) GetLocationEndpointType() string { + return request.locationEndpointType +} + +func (request *baseRequest) GetProduct() string { + return request.product +} + +func (request *baseRequest) GetScheme() string { + return request.Scheme +} + +func (request *baseRequest) SetScheme(scheme string) { + request.Scheme = scheme +} + +func (request *baseRequest) GetMethod() string { + return request.Method +} + +func (request *baseRequest) GetDomain() string { + return request.Domain +} + +func (request *baseRequest) SetDomain(host string) { + request.Domain = host +} + +func (request *baseRequest) GetPort() 
string { + return request.Port +} + +func (request *baseRequest) GetRegionId() string { + return request.RegionId +} + +func (request *baseRequest) GetHeaders() map[string]string { + return request.Headers +} + +func (request *baseRequest) SetContentType(contentType string) { + request.Headers["Content-Type"] = contentType +} + +func (request *baseRequest) GetContentType() (contentType string, contains bool) { + contentType, contains = request.Headers["Content-Type"] + return +} + +func (request *baseRequest) SetStringToSign(stringToSign string) { + request.stringToSign = stringToSign +} + +func (request *baseRequest) GetStringToSign() string { + return request.stringToSign +} + +func defaultBaseRequest() (request *baseRequest) { + request = &baseRequest{ + Scheme: "", + AcceptFormat: "JSON", + Method: GET, + QueryParams: make(map[string]string), + Headers: map[string]string{ + "x-sdk-client": "golang/1.0.0", + "x-sdk-invoke-type": "normal", + }, + FormParams: make(map[string]string), + } + return +} + +func InitParams(request AcsRequest) (err error) { + requestValue := reflect.ValueOf(request).Elem() + err = flatRepeatedList(requestValue, request, "", "") + return +} + +func flatRepeatedList(dataValue reflect.Value, request AcsRequest, position, prefix string) (err error) { + dataType := dataValue.Type() + for i := 0; i < dataType.NumField(); i++ { + field := dataType.Field(i) + name, containsNameTag := field.Tag.Lookup("name") + fieldPosition := position + if fieldPosition == "" { + fieldPosition, _ = field.Tag.Lookup("position") + } + typeTag, containsTypeTag := field.Tag.Lookup("type") + if containsNameTag { + if !containsTypeTag { + // simple param + key := prefix + name + value := dataValue.Field(i).String() + err = addParam(request, fieldPosition, key, value) + if err != nil { + return + } + } else if typeTag == "Repeated" { + // repeated param + repeatedFieldValue := dataValue.Field(i) + if repeatedFieldValue.Kind() != reflect.Slice { + // possible value: {"[]string", "*[]struct"}, we must call Elem() in the last condition + repeatedFieldValue = repeatedFieldValue.Elem() + } + if repeatedFieldValue.IsValid() && !repeatedFieldValue.IsNil() { + for m := 0; m < repeatedFieldValue.Len(); m++ { + elementValue := repeatedFieldValue.Index(m) + key := prefix + name + "." 
+ strconv.Itoa(m+1) + if elementValue.Type().String() == "string" { + value := elementValue.String() + err = addParam(request, fieldPosition, key, value) + if err != nil { + return + } + } else { + err = flatRepeatedList(elementValue, request, fieldPosition, key+".") + if err != nil { + return + } + } + } + } + } + } + } + return +} + +func addParam(request AcsRequest, position, name, value string) (err error) { + if len(value) > 0 { + switch position { + case Header: + request.addHeaderParam(name, value) + case Query: + request.addQueryParam(name, value) + case Path: + request.addPathParam(name, value) + case Body: + request.addFormParam(name, value) + default: + errMsg := fmt.Sprintf(errors.UnsupportedParamPositionErrorMessage, position) + err = errors.NewClientError(errors.UnsupportedParamPositionErrorCode, errMsg, nil) + } + } + return +} diff --git a/vendor/github.com/aliyun/alibaba-cloud-sdk-go/sdk/requests/common_request.go b/vendor/github.com/aliyun/alibaba-cloud-sdk-go/sdk/requests/common_request.go new file mode 100644 index 0000000000..d5d841b42d --- /dev/null +++ b/vendor/github.com/aliyun/alibaba-cloud-sdk-go/sdk/requests/common_request.go @@ -0,0 +1,128 @@ +package requests + +import ( + "bytes" + "fmt" + "github.com/aliyun/alibaba-cloud-sdk-go/sdk/errors" + "io" + "strings" +) + +type CommonRequest struct { + *baseRequest + + Version string + ApiName string + Product string + + // roa params + PathPattern string + PathParams map[string]string + + Ontology AcsRequest +} + +func NewCommonRequest() (request *CommonRequest) { + request = &CommonRequest{ + baseRequest: defaultBaseRequest(), + } + request.Headers["x-sdk-invoke-type"] = "common" + request.PathParams = make(map[string]string) + return +} + +func (request *CommonRequest) String() string { + request.TransToAcsRequest() + request.BuildQueries() + request.BuildUrl() + + resultBuilder := bytes.Buffer{} + + mapOutput := func(m map[string]string) { + if len(m) > 0 { + for key, value := range m { + resultBuilder.WriteString(key + ": " + value + "\n") + } + } + } + + // Request Line + resultBuilder.WriteString("\n") + resultBuilder.WriteString(fmt.Sprintf("%s %s %s/1.1\n", request.Method, request.GetQueries(), strings.ToUpper(request.Scheme))) + + // Headers + resultBuilder.WriteString("Host" + ": " + request.Domain + "\n") + mapOutput(request.Headers) + + resultBuilder.WriteString("\n") + // Body + if len(request.Content) > 0 { + resultBuilder.WriteString(string(request.Content) + "\n") + } else { + mapOutput(request.FormParams) + } + + return resultBuilder.String() +} + +func (request *CommonRequest) TransToAcsRequest() { + if len(request.Version) == 0 { + errors.NewClientError(errors.MissingParamErrorCode, "Common request [version] is required", nil) + } + if len(request.ApiName) == 0 && len(request.PathPattern) == 0 { + errors.NewClientError(errors.MissingParamErrorCode, "At least one of [ApiName] and [PathPattern] should has a value", nil) + } + if len(request.Domain) == 0 && len(request.Product) == 0 { + errors.NewClientError(errors.MissingParamErrorCode, "At least one of [Domain] and [Product] should has a value", nil) + } + + if len(request.PathPattern) > 0 { + roaRequest := &RoaRequest{} + roaRequest.initWithCommonRequest(request) + request.Ontology = roaRequest + } else { + rpcRequest := &RpcRequest{} + rpcRequest.baseRequest = request.baseRequest + rpcRequest.product = request.Product + rpcRequest.version = request.Version + rpcRequest.actionName = request.ApiName + request.Ontology = rpcRequest + } + +} + +func 
(request *CommonRequest) BuildUrl() string { + if len(request.Port) > 0 { + return strings.ToLower(request.Scheme) + "://" + request.Domain + ":" + request.Port + request.BuildQueries() + } + + return strings.ToLower(request.Scheme) + "://" + request.Domain + request.BuildQueries() +} + +func (request *CommonRequest) BuildQueries() string { + return request.Ontology.BuildQueries() +} + +func (request *CommonRequest) GetUrl() string { + if len(request.Port) > 0 { + return strings.ToLower(request.Scheme) + "://" + request.Domain + ":" + request.Port + request.GetQueries() + } + + return strings.ToLower(request.Scheme) + "://" + request.Domain + request.GetQueries() +} + +func (request *CommonRequest) GetQueries() string { + return request.Ontology.GetQueries() +} + +func (request *CommonRequest) GetBodyReader() io.Reader { + return request.Ontology.GetBodyReader() +} + +func (request *CommonRequest) GetStyle() string { + return request.Ontology.GetStyle() +} + +func (request *CommonRequest) addPathParam(key, value string) { + request.PathParams[key] = value +} diff --git a/vendor/github.com/aliyun/alibaba-cloud-sdk-go/sdk/requests/roa_request.go b/vendor/github.com/aliyun/alibaba-cloud-sdk-go/sdk/requests/roa_request.go new file mode 100644 index 0000000000..395178f3b6 --- /dev/null +++ b/vendor/github.com/aliyun/alibaba-cloud-sdk-go/sdk/requests/roa_request.go @@ -0,0 +1,144 @@ +/* + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
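CommonRequest is the untyped request used by CommonApi callers (the location resolver earlier in this patch builds one). Setting Product, Version and ApiName without a PathPattern makes TransToAcsRequest choose the RPC style, after which BuildUrl renders scheme, domain and query string. A sketch with placeholder values; in normal use the domain is resolved for you rather than set by hand:

package main

import (
	"fmt"

	"github.com/aliyun/alibaba-cloud-sdk-go/sdk/requests"
)

func main() {
	req := requests.NewCommonRequest()
	req.Product = "Ecs"
	req.Version = "2014-05-26"
	req.ApiName = "DescribeRegions"
	req.Domain = "ecs.aliyuncs.com" // placeholder; usually filled in by endpoint resolution
	req.Scheme = requests.HTTPS
	req.QueryParams["RegionId"] = "cn-hangzhou"

	// No PathPattern, so the request is translated into the RPC style.
	req.TransToAcsRequest()
	fmt.Println(req.BuildUrl()) // https://ecs.aliyuncs.com/?RegionId=cn-hangzhou
}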
+ */ + +package requests + +import ( + "bytes" + "github.com/aliyun/alibaba-cloud-sdk-go/sdk/utils" + "io" + "net/url" + "sort" + "strings" +) + +type RoaRequest struct { + *baseRequest + pathPattern string + PathParams map[string]string +} + +func (*RoaRequest) GetStyle() string { + return ROA +} + +func (request *RoaRequest) GetBodyReader() io.Reader { + if request.FormParams != nil && len(request.FormParams) > 0 { + formString := utils.GetUrlFormedMap(request.FormParams) + return strings.NewReader(formString) + } else if len(request.Content) > 0 { + return bytes.NewReader(request.Content) + } else { + return nil + } +} + +func (request *RoaRequest) GetQueries() string { + return request.queries +} + +// for sign method, need not url encoded +func (request *RoaRequest) BuildQueries() string { + return request.buildQueries(false) +} + +func (request *RoaRequest) buildQueries(needParamEncode bool) string { + // replace path params with value + path := request.pathPattern + for key, value := range request.PathParams { + path = strings.Replace(path, "["+key+"]", value, 1) + } + + queryParams := request.QueryParams + // check if path contains params + splitArray := strings.Split(path, "?") + path = splitArray[0] + if len(splitArray) > 1 && len(splitArray[1]) > 0 { + queryParams[splitArray[1]] = "" + } + // sort QueryParams by key + var queryKeys []string + for key := range queryParams { + queryKeys = append(queryKeys, key) + } + sort.Strings(queryKeys) + + // append urlBuilder + urlBuilder := bytes.Buffer{} + urlBuilder.WriteString(path) + urlBuilder.WriteString("?") + for i := 0; i < len(queryKeys); i++ { + queryKey := queryKeys[i] + urlBuilder.WriteString(queryKey) + if value := queryParams[queryKey]; len(value) > 0 { + urlBuilder.WriteString("=") + if needParamEncode { + urlBuilder.WriteString(url.QueryEscape(value)) + } else { + urlBuilder.WriteString(value) + } + } + if i < len(queryKeys)-1 { + urlBuilder.WriteString("&") + } + } + result := urlBuilder.String() + result = popStandardUrlencode(result) + request.queries = result + return request.queries +} + +func popStandardUrlencode(stringToSign string) (result string) { + result = strings.Replace(stringToSign, "+", "%20", -1) + result = strings.Replace(result, "*", "%2A", -1) + result = strings.Replace(result, "%7E", "~", -1) + return +} + +func (request *RoaRequest) GetUrl() string { + return strings.ToLower(request.Scheme) + "://" + request.Domain + ":" + request.Port + request.GetQueries() +} + +func (request *RoaRequest) BuildUrl() string { + // for network trans, need url encoded + return strings.ToLower(request.Scheme) + "://" + request.Domain + ":" + request.Port + request.buildQueries(true) +} + +func (request *RoaRequest) addPathParam(key, value string) { + request.PathParams[key] = value +} + +func (request *RoaRequest) InitWithApiInfo(product, version, action, uriPattern, serviceCode, endpointType string) { + request.baseRequest = defaultBaseRequest() + request.PathParams = make(map[string]string) + request.Headers["x-acs-version"] = version + request.pathPattern = uriPattern + request.locationServiceCode = serviceCode + request.locationEndpointType = endpointType + //request.product = product + //request.version = version + //request.actionName = action +} + +func (request *RoaRequest) initWithCommonRequest(commonRequest *CommonRequest) { + request.baseRequest = commonRequest.baseRequest + request.PathParams = commonRequest.PathParams + //request.product = commonRequest.Product + //request.version = commonRequest.Version + 
request.Headers["x-acs-version"] = commonRequest.Version + //request.actionName = commonRequest.ApiName + request.pathPattern = commonRequest.PathPattern + request.locationServiceCode = "" + request.locationEndpointType = "" +} diff --git a/vendor/github.com/aliyun/alibaba-cloud-sdk-go/sdk/requests/rpc_request.go b/vendor/github.com/aliyun/alibaba-cloud-sdk-go/sdk/requests/rpc_request.go new file mode 100644 index 0000000000..15fe2faf82 --- /dev/null +++ b/vendor/github.com/aliyun/alibaba-cloud-sdk-go/sdk/requests/rpc_request.go @@ -0,0 +1,81 @@ +/* + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package requests + +import ( + "github.com/aliyun/alibaba-cloud-sdk-go/sdk/utils" + "io" + "strings" +) + +type RpcRequest struct { + *baseRequest +} + +func (request *RpcRequest) init() { + request.baseRequest = defaultBaseRequest() + request.Method = POST +} + +func (*RpcRequest) GetStyle() string { + return RPC +} + +func (request *RpcRequest) GetBodyReader() io.Reader { + if request.FormParams != nil && len(request.FormParams) > 0 { + formString := utils.GetUrlFormedMap(request.FormParams) + return strings.NewReader(formString) + } else { + return strings.NewReader("") + } +} + +func (request *RpcRequest) BuildQueries() string { + request.queries = "/?" 
+ utils.GetUrlFormedMap(request.QueryParams) + return request.queries +} + +func (request *RpcRequest) GetQueries() string { + return request.queries +} + +func (request *RpcRequest) BuildUrl() string { + return strings.ToLower(request.Scheme) + "://" + request.Domain + request.BuildQueries() +} + +func (request *RpcRequest) GetUrl() string { + return strings.ToLower(request.Scheme) + "://" + request.Domain + request.GetQueries() +} + +func (request *RpcRequest) GetVersion() string { + return request.version +} + +func (request *RpcRequest) GetActionName() string { + return request.actionName +} + +func (request *RpcRequest) addPathParam(key, value string) { + panic("not support") +} + +func (request *RpcRequest) InitWithApiInfo(product, version, action, serviceCode, endpointType string) { + request.init() + request.product = product + request.version = version + request.actionName = action + request.locationServiceCode = serviceCode + request.locationEndpointType = endpointType +} diff --git a/vendor/github.com/aliyun/alibaba-cloud-sdk-go/sdk/requests/types.go b/vendor/github.com/aliyun/alibaba-cloud-sdk-go/sdk/requests/types.go new file mode 100644 index 0000000000..676238cf6e --- /dev/null +++ b/vendor/github.com/aliyun/alibaba-cloud-sdk-go/sdk/requests/types.go @@ -0,0 +1,53 @@ +package requests + +import "strconv" + +type Integer string + +func NewInteger(integer int) Integer { + return Integer(strconv.Itoa(integer)) +} + +func (integer Integer) hasValue() bool { + return integer != "" +} + +func (integer Integer) getValue() (int, error) { + return strconv.Atoi(string(integer)) +} + +func NewInteger64(integer int64) Integer { + return Integer(strconv.FormatInt(integer, 10)) +} + +func (integer Integer) getValue64() (int64, error) { + return strconv.ParseInt(string(integer), 10, 0) +} + +type Boolean string + +func NewBoolean(bool bool) Boolean { + return Boolean(strconv.FormatBool(bool)) +} + +func (boolean Boolean) hasValue() bool { + return boolean != "" +} + +func (boolean Boolean) getValue() (bool, error) { + return strconv.ParseBool(string(boolean)) +} + +type Float string + +func NewFloat(f float64) Float { + return Float(strconv.FormatFloat(f, 'f', 6, 64)) +} + +func (float Float) hasValue() bool { + return float != "" +} + +func (float Float) getValue() (float64, error) { + return strconv.ParseFloat(string(float), 64) +} diff --git a/vendor/github.com/aliyun/alibaba-cloud-sdk-go/sdk/responses/json_parser.go b/vendor/github.com/aliyun/alibaba-cloud-sdk-go/sdk/responses/json_parser.go new file mode 100644 index 0000000000..43ba6fc269 --- /dev/null +++ b/vendor/github.com/aliyun/alibaba-cloud-sdk-go/sdk/responses/json_parser.go @@ -0,0 +1,341 @@ +package responses + +import ( + "encoding/json" + "github.com/json-iterator/go" + "io" + "math" + "strconv" + "strings" + "sync" + "unsafe" +) + +const maxUint = ^uint(0) +const maxInt = int(maxUint >> 1) +const minInt = -maxInt - 1 + +var jsonParser jsoniter.API +var initJson = &sync.Once{} + +func initJsonParserOnce() { + initJson.Do(func() { + registerBetterFuzzyDecoder() + jsonParser = jsoniter.ConfigCompatibleWithStandardLibrary + }) +} + +func registerBetterFuzzyDecoder() { + jsoniter.RegisterTypeDecoder("string", &nullableFuzzyStringDecoder{}) + jsoniter.RegisterTypeDecoder("bool", &fuzzyBoolDecoder{}) + jsoniter.RegisterTypeDecoder("float32", &nullableFuzzyFloat32Decoder{}) + jsoniter.RegisterTypeDecoder("float64", &nullableFuzzyFloat64Decoder{}) + jsoniter.RegisterTypeDecoder("int", &nullableFuzzyIntegerDecoder{func(isFloat 
bool, ptr unsafe.Pointer, iter *jsoniter.Iterator) { + if isFloat { + val := iter.ReadFloat64() + if val > float64(maxInt) || val < float64(minInt) { + iter.ReportError("fuzzy decode int", "exceed range") + return + } + *((*int)(ptr)) = int(val) + } else { + *((*int)(ptr)) = iter.ReadInt() + } + }}) + jsoniter.RegisterTypeDecoder("uint", &nullableFuzzyIntegerDecoder{func(isFloat bool, ptr unsafe.Pointer, iter *jsoniter.Iterator) { + if isFloat { + val := iter.ReadFloat64() + if val > float64(maxUint) || val < 0 { + iter.ReportError("fuzzy decode uint", "exceed range") + return + } + *((*uint)(ptr)) = uint(val) + } else { + *((*uint)(ptr)) = iter.ReadUint() + } + }}) + jsoniter.RegisterTypeDecoder("int8", &nullableFuzzyIntegerDecoder{func(isFloat bool, ptr unsafe.Pointer, iter *jsoniter.Iterator) { + if isFloat { + val := iter.ReadFloat64() + if val > float64(math.MaxInt8) || val < float64(math.MinInt8) { + iter.ReportError("fuzzy decode int8", "exceed range") + return + } + *((*int8)(ptr)) = int8(val) + } else { + *((*int8)(ptr)) = iter.ReadInt8() + } + }}) + jsoniter.RegisterTypeDecoder("uint8", &nullableFuzzyIntegerDecoder{func(isFloat bool, ptr unsafe.Pointer, iter *jsoniter.Iterator) { + if isFloat { + val := iter.ReadFloat64() + if val > float64(math.MaxUint8) || val < 0 { + iter.ReportError("fuzzy decode uint8", "exceed range") + return + } + *((*uint8)(ptr)) = uint8(val) + } else { + *((*uint8)(ptr)) = iter.ReadUint8() + } + }}) + jsoniter.RegisterTypeDecoder("int16", &nullableFuzzyIntegerDecoder{func(isFloat bool, ptr unsafe.Pointer, iter *jsoniter.Iterator) { + if isFloat { + val := iter.ReadFloat64() + if val > float64(math.MaxInt16) || val < float64(math.MinInt16) { + iter.ReportError("fuzzy decode int16", "exceed range") + return + } + *((*int16)(ptr)) = int16(val) + } else { + *((*int16)(ptr)) = iter.ReadInt16() + } + }}) + jsoniter.RegisterTypeDecoder("uint16", &nullableFuzzyIntegerDecoder{func(isFloat bool, ptr unsafe.Pointer, iter *jsoniter.Iterator) { + if isFloat { + val := iter.ReadFloat64() + if val > float64(math.MaxUint16) || val < 0 { + iter.ReportError("fuzzy decode uint16", "exceed range") + return + } + *((*uint16)(ptr)) = uint16(val) + } else { + *((*uint16)(ptr)) = iter.ReadUint16() + } + }}) + jsoniter.RegisterTypeDecoder("int32", &nullableFuzzyIntegerDecoder{func(isFloat bool, ptr unsafe.Pointer, iter *jsoniter.Iterator) { + if isFloat { + val := iter.ReadFloat64() + if val > float64(math.MaxInt32) || val < float64(math.MinInt32) { + iter.ReportError("fuzzy decode int32", "exceed range") + return + } + *((*int32)(ptr)) = int32(val) + } else { + *((*int32)(ptr)) = iter.ReadInt32() + } + }}) + jsoniter.RegisterTypeDecoder("uint32", &nullableFuzzyIntegerDecoder{func(isFloat bool, ptr unsafe.Pointer, iter *jsoniter.Iterator) { + if isFloat { + val := iter.ReadFloat64() + if val > float64(math.MaxUint32) || val < 0 { + iter.ReportError("fuzzy decode uint32", "exceed range") + return + } + *((*uint32)(ptr)) = uint32(val) + } else { + *((*uint32)(ptr)) = iter.ReadUint32() + } + }}) + jsoniter.RegisterTypeDecoder("int64", &nullableFuzzyIntegerDecoder{func(isFloat bool, ptr unsafe.Pointer, iter *jsoniter.Iterator) { + if isFloat { + val := iter.ReadFloat64() + if val > float64(math.MaxInt64) || val < float64(math.MinInt64) { + iter.ReportError("fuzzy decode int64", "exceed range") + return + } + *((*int64)(ptr)) = int64(val) + } else { + *((*int64)(ptr)) = iter.ReadInt64() + } + }}) + jsoniter.RegisterTypeDecoder("uint64", &nullableFuzzyIntegerDecoder{func(isFloat 
bool, ptr unsafe.Pointer, iter *jsoniter.Iterator) { + if isFloat { + val := iter.ReadFloat64() + if val > float64(math.MaxUint64) || val < 0 { + iter.ReportError("fuzzy decode uint64", "exceed range") + return + } + *((*uint64)(ptr)) = uint64(val) + } else { + *((*uint64)(ptr)) = iter.ReadUint64() + } + }}) +} + +type nullableFuzzyStringDecoder struct { +} + +func (decoder *nullableFuzzyStringDecoder) Decode(ptr unsafe.Pointer, iter *jsoniter.Iterator) { + valueType := iter.WhatIsNext() + switch valueType { + case jsoniter.NumberValue: + var number json.Number + iter.ReadVal(&number) + *((*string)(ptr)) = string(number) + case jsoniter.StringValue: + *((*string)(ptr)) = iter.ReadString() + case jsoniter.BoolValue: + *((*string)(ptr)) = strconv.FormatBool(iter.ReadBool()) + case jsoniter.NilValue: + iter.ReadNil() + *((*string)(ptr)) = "" + default: + iter.ReportError("fuzzyStringDecoder", "not number or string or bool") + } +} + +type fuzzyBoolDecoder struct { +} + +func (decoder *fuzzyBoolDecoder) Decode(ptr unsafe.Pointer, iter *jsoniter.Iterator) { + valueType := iter.WhatIsNext() + switch valueType { + case jsoniter.BoolValue: + *((*bool)(ptr)) = iter.ReadBool() + case jsoniter.NumberValue: + var number json.Number + iter.ReadVal(&number) + num, err := number.Int64() + if err != nil { + iter.ReportError("fuzzyBoolDecoder", "get value from json.number failed") + } + if num == 0 { + *((*bool)(ptr)) = false + } else { + *((*bool)(ptr)) = true + } + case jsoniter.StringValue: + strValue := strings.ToLower(iter.ReadString()) + if strValue == "true" { + *((*bool)(ptr)) = true + } else if strValue == "false" || strValue == "" { + *((*bool)(ptr)) = false + } else { + iter.ReportError("fuzzyBoolDecoder", "unsupported bool value: "+strValue) + } + case jsoniter.NilValue: + iter.ReadNil() + *((*bool)(ptr)) = false + default: + iter.ReportError("fuzzyBoolDecoder", "not number or string or nil") + } +} + +type tolerateEmptyArrayDecoder struct { + valDecoder jsoniter.ValDecoder +} + +func (decoder *tolerateEmptyArrayDecoder) Decode(ptr unsafe.Pointer, iter *jsoniter.Iterator) { + if iter.WhatIsNext() == jsoniter.ArrayValue { + iter.Skip() + newIter := iter.Pool().BorrowIterator([]byte("{}")) + defer iter.Pool().ReturnIterator(newIter) + decoder.valDecoder.Decode(ptr, newIter) + } else { + decoder.valDecoder.Decode(ptr, iter) + } +} + +type nullableFuzzyIntegerDecoder struct { + fun func(isFloat bool, ptr unsafe.Pointer, iter *jsoniter.Iterator) +} + +func (decoder *nullableFuzzyIntegerDecoder) Decode(ptr unsafe.Pointer, iter *jsoniter.Iterator) { + valueType := iter.WhatIsNext() + var str string + switch valueType { + case jsoniter.NumberValue: + var number json.Number + iter.ReadVal(&number) + str = string(number) + case jsoniter.StringValue: + str = iter.ReadString() + // support empty string + if str == "" { + str = "0" + } + case jsoniter.BoolValue: + if iter.ReadBool() { + str = "1" + } else { + str = "0" + } + case jsoniter.NilValue: + iter.ReadNil() + str = "0" + default: + iter.ReportError("fuzzyIntegerDecoder", "not number or string") + } + newIter := iter.Pool().BorrowIterator([]byte(str)) + defer iter.Pool().ReturnIterator(newIter) + isFloat := strings.IndexByte(str, '.') != -1 + decoder.fun(isFloat, ptr, newIter) + if newIter.Error != nil && newIter.Error != io.EOF { + iter.Error = newIter.Error + } +} + +type nullableFuzzyFloat32Decoder struct { +} + +func (decoder *nullableFuzzyFloat32Decoder) Decode(ptr unsafe.Pointer, iter *jsoniter.Iterator) { + valueType := iter.WhatIsNext() + var 
str string + switch valueType { + case jsoniter.NumberValue: + *((*float32)(ptr)) = iter.ReadFloat32() + case jsoniter.StringValue: + str = iter.ReadString() + // support empty string + if str == "" { + *((*float32)(ptr)) = 0 + return + } + newIter := iter.Pool().BorrowIterator([]byte(str)) + defer iter.Pool().ReturnIterator(newIter) + *((*float32)(ptr)) = newIter.ReadFloat32() + if newIter.Error != nil && newIter.Error != io.EOF { + iter.Error = newIter.Error + } + case jsoniter.BoolValue: + // support bool to float32 + if iter.ReadBool() { + *((*float32)(ptr)) = 1 + } else { + *((*float32)(ptr)) = 0 + } + case jsoniter.NilValue: + iter.ReadNil() + *((*float32)(ptr)) = 0 + default: + iter.ReportError("nullableFuzzyFloat32Decoder", "not number or string") + } +} + +type nullableFuzzyFloat64Decoder struct { +} + +func (decoder *nullableFuzzyFloat64Decoder) Decode(ptr unsafe.Pointer, iter *jsoniter.Iterator) { + valueType := iter.WhatIsNext() + var str string + switch valueType { + case jsoniter.NumberValue: + *((*float64)(ptr)) = iter.ReadFloat64() + case jsoniter.StringValue: + str = iter.ReadString() + // support empty string + if str == "" { + *((*float64)(ptr)) = 0 + return + } + newIter := iter.Pool().BorrowIterator([]byte(str)) + defer iter.Pool().ReturnIterator(newIter) + *((*float64)(ptr)) = newIter.ReadFloat64() + if newIter.Error != nil && newIter.Error != io.EOF { + iter.Error = newIter.Error + } + case jsoniter.BoolValue: + // support bool to float64 + if iter.ReadBool() { + *((*float64)(ptr)) = 1 + } else { + *((*float64)(ptr)) = 0 + } + case jsoniter.NilValue: + // support empty string + iter.ReadNil() + *((*float64)(ptr)) = 0 + default: + iter.ReportError("nullableFuzzyFloat32Decoder", "not number or string") + } +} diff --git a/vendor/github.com/aliyun/alibaba-cloud-sdk-go/sdk/responses/response.go b/vendor/github.com/aliyun/alibaba-cloud-sdk-go/sdk/responses/response.go new file mode 100644 index 0000000000..9716df173a --- /dev/null +++ b/vendor/github.com/aliyun/alibaba-cloud-sdk-go/sdk/responses/response.go @@ -0,0 +1,137 @@ +/* + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
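The fuzzy decoders above are registered globally with jsoniter the first time Unmarshal (in response.go, just below) runs, which is what lets generated response structs tolerate numbers arriving as strings, booleans arriving as numbers, and so on. A small sketch of that behaviour, assuming a hypothetical response type and a fabricated JSON body:

package main

import (
	"fmt"
	"io/ioutil"
	"net/http"
	"strings"

	"github.com/aliyun/alibaba-cloud-sdk-go/sdk/responses"
)

// DemoResponse embeds BaseResponse the same way the generated API responses do;
// the type and field names here are made up for illustration.
type DemoResponse struct {
	*responses.BaseResponse
	Count   int
	Enabled bool
	Name    string
}

func main() {
	// A strict decoder would reject these mismatched types; the fuzzy
	// decoders normalize them.
	body := `{"Count":"42","Enabled":1,"Name":123}`
	httpResp := &http.Response{
		StatusCode: 200,
		Body:       ioutil.NopCloser(strings.NewReader(body)),
	}

	resp := &DemoResponse{BaseResponse: &responses.BaseResponse{}}
	if err := responses.Unmarshal(resp, httpResp, "JSON"); err != nil {
		panic(err)
	}
	fmt.Println(resp.Count, resp.Enabled, resp.Name) // 42 true 123
}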
+ */ + +package responses + +import ( + "bytes" + "encoding/xml" + "fmt" + "github.com/aliyun/alibaba-cloud-sdk-go/sdk/errors" + "io/ioutil" + "net/http" + "strings" +) + +type AcsResponse interface { + IsSuccess() bool + GetHttpStatus() int + GetHttpHeaders() map[string][]string + GetHttpContentString() string + GetHttpContentBytes() []byte + GetOriginHttpResponse() *http.Response + parseFromHttpResponse(httpResponse *http.Response) error +} + +func Unmarshal(response AcsResponse, httpResponse *http.Response, format string) (err error) { + err = response.parseFromHttpResponse(httpResponse) + if err != nil { + return + } + if !response.IsSuccess() { + err = errors.NewServerError(response.GetHttpStatus(), response.GetHttpContentString(), "") + return + } + if _, isCommonResponse := response.(CommonResponse); isCommonResponse { + // common response need not unmarshal + return + } + if strings.ToUpper(format) == "JSON" { + initJsonParserOnce() + err = jsonParser.Unmarshal(response.GetHttpContentBytes(), response) + if err != nil { + err = errors.NewClientError(errors.JsonUnmarshalErrorCode, errors.JsonUnmarshalErrorMessage, err) + } + } else if strings.ToUpper(format) == "XML" { + err = xml.Unmarshal(response.GetHttpContentBytes(), response) + } + return +} + +type BaseResponse struct { + httpStatus int + httpHeaders map[string][]string + httpContentString string + httpContentBytes []byte + originHttpResponse *http.Response +} + +func (baseResponse *BaseResponse) GetHttpStatus() int { + return baseResponse.httpStatus +} + +func (baseResponse *BaseResponse) GetHttpHeaders() map[string][]string { + return baseResponse.httpHeaders +} + +func (baseResponse *BaseResponse) GetHttpContentString() string { + return baseResponse.httpContentString +} + +func (baseResponse *BaseResponse) GetHttpContentBytes() []byte { + return baseResponse.httpContentBytes +} + +func (baseResponse *BaseResponse) GetOriginHttpResponse() *http.Response { + return baseResponse.originHttpResponse +} + +func (baseResponse *BaseResponse) IsSuccess() bool { + if baseResponse.GetHttpStatus() >= 200 && baseResponse.GetHttpStatus() < 300 { + return true + } + + return false +} + +func (baseResponse *BaseResponse) parseFromHttpResponse(httpResponse *http.Response) (err error) { + defer httpResponse.Body.Close() + body, err := ioutil.ReadAll(httpResponse.Body) + if err != nil { + return + } + baseResponse.httpStatus = httpResponse.StatusCode + baseResponse.httpHeaders = httpResponse.Header + baseResponse.httpContentBytes = body + baseResponse.httpContentString = string(body) + baseResponse.originHttpResponse = httpResponse + return +} + +func (baseResponse *BaseResponse) String() string { + resultBuilder := bytes.Buffer{} + // statusCode + resultBuilder.WriteString("\n") + resultBuilder.WriteString(fmt.Sprintf("%s %s\n", baseResponse.originHttpResponse.Proto, baseResponse.originHttpResponse.Status)) + // httpHeaders + //resultBuilder.WriteString("Headers:\n") + for key, value := range baseResponse.httpHeaders { + resultBuilder.WriteString(key + ": " + strings.Join(value, ";") + "\n") + } + resultBuilder.WriteString("\n") + // content + //resultBuilder.WriteString("Content:\n") + resultBuilder.WriteString(baseResponse.httpContentString + "\n") + return resultBuilder.String() +} + +type CommonResponse struct { + *BaseResponse +} + +func NewCommonResponse() (response *CommonResponse) { + return &CommonResponse{ + BaseResponse: &BaseResponse{}, + } +} diff --git a/vendor/github.com/aliyun/alibaba-cloud-sdk-go/sdk/utils/utils.go 
b/vendor/github.com/aliyun/alibaba-cloud-sdk-go/sdk/utils/utils.go new file mode 100644 index 0000000000..a00c775c2f --- /dev/null +++ b/vendor/github.com/aliyun/alibaba-cloud-sdk-go/sdk/utils/utils.go @@ -0,0 +1,117 @@ +/* + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package utils + +import ( + "crypto/md5" + "encoding/base64" + "encoding/hex" + "encoding/json" + "fmt" + "github.com/satori/go.uuid" + "net/url" + "reflect" + "strconv" + "time" +) + +// if you use go 1.10 or higher, you can hack this util by these to avoid "TimeZone.zip not found" on Windows +var LoadLocationFromTZData func(name string, data []byte) (*time.Location, error) = nil +var TZData []byte = nil + +func GetUUIDV4() (uuidHex string) { + uuidV4 := uuid.NewV4() + uuidHex = hex.EncodeToString(uuidV4.Bytes()) + return +} + +func GetMD5Base64(bytes []byte) (base64Value string) { + md5Ctx := md5.New() + md5Ctx.Write(bytes) + md5Value := md5Ctx.Sum(nil) + base64Value = base64.StdEncoding.EncodeToString(md5Value) + return +} + +func GetGMTLocation() (*time.Location, error) { + if LoadLocationFromTZData != nil && TZData != nil { + return LoadLocationFromTZData("GMT", TZData) + } else { + return time.LoadLocation("GMT") + } +} + +func GetTimeInFormatISO8601() (timeStr string) { + gmt, err := GetGMTLocation() + + if err != nil { + panic(err) + } + return time.Now().In(gmt).Format("2006-01-02T15:04:05Z") +} + +func GetTimeInFormatRFC2616() (timeStr string) { + gmt, err := GetGMTLocation() + + if err != nil { + panic(err) + } + return time.Now().In(gmt).Format("Mon, 02 Jan 2006 15:04:05 GMT") +} + +func GetUrlFormedMap(source map[string]string) (urlEncoded string) { + urlEncoder := url.Values{} + for key, value := range source { + urlEncoder.Add(key, value) + } + urlEncoded = urlEncoder.Encode() + return +} + +func GetFromJsonString(jsonString, key string) (result string, err error) { + var responseMap map[string]*json.RawMessage + err = json.Unmarshal([]byte(jsonString), &responseMap) + if err != nil { + return + } + fmt.Println(string(*responseMap[key])) + err = json.Unmarshal(*responseMap[key], &result) + return +} + +func InitStructWithDefaultTag(bean interface{}) { + configType := reflect.TypeOf(bean) + for i := 0; i < configType.Elem().NumField(); i++ { + field := configType.Elem().Field(i) + defaultValue := field.Tag.Get("default") + if defaultValue == "" { + continue + } + setter := reflect.ValueOf(bean).Elem().Field(i) + switch field.Type.String() { + case "int": + intValue, _ := strconv.ParseInt(defaultValue, 10, 64) + setter.SetInt(intValue) + case "time.Duration": + intValue, _ := strconv.ParseInt(defaultValue, 10, 64) + setter.SetInt(intValue) + case "string": + setter.SetString(defaultValue) + case "bool": + boolValue, _ := strconv.ParseBool(defaultValue) + setter.SetBool(boolValue) + } + } +} diff --git a/vendor/github.com/aliyun/alibaba-cloud-sdk-go/services/sts/assume_role.go b/vendor/github.com/aliyun/alibaba-cloud-sdk-go/services/sts/assume_role.go new file mode 100644 index 
0000000000..40f7a93ef0 --- /dev/null +++ b/vendor/github.com/aliyun/alibaba-cloud-sdk-go/services/sts/assume_role.go @@ -0,0 +1,108 @@ +package sts + +//Licensed under the Apache License, Version 2.0 (the "License"); +//you may not use this file except in compliance with the License. +//You may obtain a copy of the License at +// +//http://www.apache.org/licenses/LICENSE-2.0 +// +//Unless required by applicable law or agreed to in writing, software +//distributed under the License is distributed on an "AS IS" BASIS, +//WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +//See the License for the specific language governing permissions and +//limitations under the License. +// +// Code generated by Alibaba Cloud SDK Code Generator. +// Changes may cause incorrect behavior and will be lost if the code is regenerated. + +import ( + "github.com/aliyun/alibaba-cloud-sdk-go/sdk/requests" + "github.com/aliyun/alibaba-cloud-sdk-go/sdk/responses" +) + +// AssumeRole invokes the sts.AssumeRole API synchronously +// api document: https://help.aliyun.com/api/sts/assumerole.html +func (client *Client) AssumeRole(request *AssumeRoleRequest) (response *AssumeRoleResponse, err error) { + response = CreateAssumeRoleResponse() + err = client.DoAction(request, response) + return +} + +// AssumeRoleWithChan invokes the sts.AssumeRole API asynchronously +// api document: https://help.aliyun.com/api/sts/assumerole.html +// asynchronous document: https://help.aliyun.com/document_detail/66220.html +func (client *Client) AssumeRoleWithChan(request *AssumeRoleRequest) (<-chan *AssumeRoleResponse, <-chan error) { + responseChan := make(chan *AssumeRoleResponse, 1) + errChan := make(chan error, 1) + err := client.AddAsyncTask(func() { + defer close(responseChan) + defer close(errChan) + response, err := client.AssumeRole(request) + if err != nil { + errChan <- err + } else { + responseChan <- response + } + }) + if err != nil { + errChan <- err + close(responseChan) + close(errChan) + } + return responseChan, errChan +} + +// AssumeRoleWithCallback invokes the sts.AssumeRole API asynchronously +// api document: https://help.aliyun.com/api/sts/assumerole.html +// asynchronous document: https://help.aliyun.com/document_detail/66220.html +func (client *Client) AssumeRoleWithCallback(request *AssumeRoleRequest, callback func(response *AssumeRoleResponse, err error)) <-chan int { + result := make(chan int, 1) + err := client.AddAsyncTask(func() { + var response *AssumeRoleResponse + var err error + defer close(result) + response, err = client.AssumeRole(request) + callback(response, err) + result <- 1 + }) + if err != nil { + defer close(result) + callback(nil, err) + result <- 0 + } + return result +} + +// AssumeRoleRequest is the request struct for api AssumeRole +type AssumeRoleRequest struct { + *requests.RpcRequest + DurationSeconds requests.Integer `position:"Query" name:"DurationSeconds"` + Policy string `position:"Query" name:"Policy"` + RoleArn string `position:"Query" name:"RoleArn"` + RoleSessionName string `position:"Query" name:"RoleSessionName"` +} + +// AssumeRoleResponse is the response struct for api AssumeRole +type AssumeRoleResponse struct { + *responses.BaseResponse + RequestId string `json:"RequestId" xml:"RequestId"` + Credentials Credentials `json:"Credentials" xml:"Credentials"` + AssumedRoleUser AssumedRoleUser `json:"AssumedRoleUser" xml:"AssumedRoleUser"` +} + +// CreateAssumeRoleRequest creates a request to invoke AssumeRole API +func CreateAssumeRoleRequest() (request 
*AssumeRoleRequest) { + request = &AssumeRoleRequest{ + RpcRequest: &requests.RpcRequest{}, + } + request.InitWithApiInfo("Sts", "2015-04-01", "AssumeRole", "", "") + return +} + +// CreateAssumeRoleResponse creates a response to parse from AssumeRole response +func CreateAssumeRoleResponse() (response *AssumeRoleResponse) { + response = &AssumeRoleResponse{ + BaseResponse: &responses.BaseResponse{}, + } + return +} diff --git a/vendor/github.com/aliyun/alibaba-cloud-sdk-go/services/sts/client.go b/vendor/github.com/aliyun/alibaba-cloud-sdk-go/services/sts/client.go new file mode 100644 index 0000000000..dcb893d181 --- /dev/null +++ b/vendor/github.com/aliyun/alibaba-cloud-sdk-go/services/sts/client.go @@ -0,0 +1,81 @@ +package sts + +//Licensed under the Apache License, Version 2.0 (the "License"); +//you may not use this file except in compliance with the License. +//You may obtain a copy of the License at +// +//http://www.apache.org/licenses/LICENSE-2.0 +// +//Unless required by applicable law or agreed to in writing, software +//distributed under the License is distributed on an "AS IS" BASIS, +//WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +//See the License for the specific language governing permissions and +//limitations under the License. +// +// Code generated by Alibaba Cloud SDK Code Generator. +// Changes may cause incorrect behavior and will be lost if the code is regenerated. + +import ( + "github.com/aliyun/alibaba-cloud-sdk-go/sdk" + "github.com/aliyun/alibaba-cloud-sdk-go/sdk/auth" +) + +// Client is the sdk client struct, each func corresponds to an OpenAPI +type Client struct { + sdk.Client +} + +// NewClient creates a sdk client with environment variables +func NewClient() (client *Client, err error) { + client = &Client{} + err = client.Init() + return +} + +// NewClientWithOptions creates a sdk client with regionId/sdkConfig/credential +// this is the common api to create a sdk client +func NewClientWithOptions(regionId string, config *sdk.Config, credential auth.Credential) (client *Client, err error) { + client = &Client{} + err = client.InitWithOptions(regionId, config, credential) + return +} + +// NewClientWithAccessKey is a shortcut to create sdk client with accesskey +// usage: https://help.aliyun.com/document_detail/66217.html +func NewClientWithAccessKey(regionId, accessKeyId, accessKeySecret string) (client *Client, err error) { + client = &Client{} + err = client.InitWithAccessKey(regionId, accessKeyId, accessKeySecret) + return +} + +// NewClientWithStsToken is a shortcut to create sdk client with sts token +// usage: https://help.aliyun.com/document_detail/66222.html +func NewClientWithStsToken(regionId, stsAccessKeyId, stsAccessKeySecret, stsToken string) (client *Client, err error) { + client = &Client{} + err = client.InitWithStsToken(regionId, stsAccessKeyId, stsAccessKeySecret, stsToken) + return +} + +// NewClientWithRamRoleArn is a shortcut to create sdk client with ram roleArn +// usage: https://help.aliyun.com/document_detail/66222.html +func NewClientWithRamRoleArn(regionId string, accessKeyId, accessKeySecret, roleArn, roleSessionName string) (client *Client, err error) { + client = &Client{} + err = client.InitWithRamRoleArn(regionId, accessKeyId, accessKeySecret, roleArn, roleSessionName) + return +} + +// NewClientWithEcsRamRole is a shortcut to create sdk client with ecs ram role +// usage: https://help.aliyun.com/document_detail/66223.html +func NewClientWithEcsRamRole(regionId string, roleName string) (client 
*Client, err error) { + client = &Client{} + err = client.InitWithEcsRamRole(regionId, roleName) + return +} + +// NewClientWithRsaKeyPair is a shortcut to create sdk client with rsa key pair +// attention: rsa key pair auth is only Japan regions available +func NewClientWithRsaKeyPair(regionId string, publicKeyId, privateKey string, sessionExpiration int) (client *Client, err error) { + client = &Client{} + err = client.InitWithRsaKeyPair(regionId, publicKeyId, privateKey, sessionExpiration) + return +} diff --git a/vendor/github.com/aliyun/alibaba-cloud-sdk-go/services/sts/generate_session_access_key.go b/vendor/github.com/aliyun/alibaba-cloud-sdk-go/services/sts/generate_session_access_key.go new file mode 100644 index 0000000000..d9e9fe04b5 --- /dev/null +++ b/vendor/github.com/aliyun/alibaba-cloud-sdk-go/services/sts/generate_session_access_key.go @@ -0,0 +1,104 @@ +package sts + +//Licensed under the Apache License, Version 2.0 (the "License"); +//you may not use this file except in compliance with the License. +//You may obtain a copy of the License at +// +//http://www.apache.org/licenses/LICENSE-2.0 +// +//Unless required by applicable law or agreed to in writing, software +//distributed under the License is distributed on an "AS IS" BASIS, +//WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +//See the License for the specific language governing permissions and +//limitations under the License. +// +// Code generated by Alibaba Cloud SDK Code Generator. +// Changes may cause incorrect behavior and will be lost if the code is regenerated. + +import ( + "github.com/aliyun/alibaba-cloud-sdk-go/sdk/requests" + "github.com/aliyun/alibaba-cloud-sdk-go/sdk/responses" +) + +// GenerateSessionAccessKey invokes the sts.GenerateSessionAccessKey API synchronously +// api document: https://help.aliyun.com/api/sts/generatesessionaccesskey.html +func (client *Client) GenerateSessionAccessKey(request *GenerateSessionAccessKeyRequest) (response *GenerateSessionAccessKeyResponse, err error) { + response = CreateGenerateSessionAccessKeyResponse() + err = client.DoAction(request, response) + return +} + +// GenerateSessionAccessKeyWithChan invokes the sts.GenerateSessionAccessKey API asynchronously +// api document: https://help.aliyun.com/api/sts/generatesessionaccesskey.html +// asynchronous document: https://help.aliyun.com/document_detail/66220.html +func (client *Client) GenerateSessionAccessKeyWithChan(request *GenerateSessionAccessKeyRequest) (<-chan *GenerateSessionAccessKeyResponse, <-chan error) { + responseChan := make(chan *GenerateSessionAccessKeyResponse, 1) + errChan := make(chan error, 1) + err := client.AddAsyncTask(func() { + defer close(responseChan) + defer close(errChan) + response, err := client.GenerateSessionAccessKey(request) + if err != nil { + errChan <- err + } else { + responseChan <- response + } + }) + if err != nil { + errChan <- err + close(responseChan) + close(errChan) + } + return responseChan, errChan +} + +// GenerateSessionAccessKeyWithCallback invokes the sts.GenerateSessionAccessKey API asynchronously +// api document: https://help.aliyun.com/api/sts/generatesessionaccesskey.html +// asynchronous document: https://help.aliyun.com/document_detail/66220.html +func (client *Client) GenerateSessionAccessKeyWithCallback(request *GenerateSessionAccessKeyRequest, callback func(response *GenerateSessionAccessKeyResponse, err error)) <-chan int { + result := make(chan int, 1) + err := client.AddAsyncTask(func() { + var response 
*GenerateSessionAccessKeyResponse + var err error + defer close(result) + response, err = client.GenerateSessionAccessKey(request) + callback(response, err) + result <- 1 + }) + if err != nil { + defer close(result) + callback(nil, err) + result <- 0 + } + return result +} + +// GenerateSessionAccessKeyRequest is the request struct for api GenerateSessionAccessKey +type GenerateSessionAccessKeyRequest struct { + *requests.RpcRequest + DurationSeconds requests.Integer `position:"Query" name:"DurationSeconds"` +} + +// GenerateSessionAccessKeyResponse is the response struct for api GenerateSessionAccessKey +type GenerateSessionAccessKeyResponse struct { + *responses.BaseResponse + RequestId string `json:"RequestId" xml:"RequestId"` + SessionAccessKey SessionAccessKey `json:"SessionAccessKey" xml:"SessionAccessKey"` +} + +// CreateGenerateSessionAccessKeyRequest creates a request to invoke GenerateSessionAccessKey API +func CreateGenerateSessionAccessKeyRequest() (request *GenerateSessionAccessKeyRequest) { + request = &GenerateSessionAccessKeyRequest{ + RpcRequest: &requests.RpcRequest{}, + } + request.InitWithApiInfo("Sts", "2015-04-01", "GenerateSessionAccessKey", "", "") + return +} + +// CreateGenerateSessionAccessKeyResponse creates a response to parse from GenerateSessionAccessKey response +func CreateGenerateSessionAccessKeyResponse() (response *GenerateSessionAccessKeyResponse) { + response = &GenerateSessionAccessKeyResponse{ + BaseResponse: &responses.BaseResponse{}, + } + return +} diff --git a/vendor/github.com/aliyun/alibaba-cloud-sdk-go/services/sts/get_caller_identity.go b/vendor/github.com/aliyun/alibaba-cloud-sdk-go/services/sts/get_caller_identity.go new file mode 100644 index 0000000000..7997f04329 --- /dev/null +++ b/vendor/github.com/aliyun/alibaba-cloud-sdk-go/services/sts/get_caller_identity.go @@ -0,0 +1,108 @@ +package sts + +//Licensed under the Apache License, Version 2.0 (the "License"); +//you may not use this file except in compliance with the License. +//You may obtain a copy of the License at +// +//http://www.apache.org/licenses/LICENSE-2.0 +// +//Unless required by applicable law or agreed to in writing, software +//distributed under the License is distributed on an "AS IS" BASIS, +//WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +//See the License for the specific language governing permissions and +//limitations under the License. +// +// Code generated by Alibaba Cloud SDK Code Generator. +// Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+ +import ( + "github.com/aliyun/alibaba-cloud-sdk-go/sdk/requests" + "github.com/aliyun/alibaba-cloud-sdk-go/sdk/responses" +) + +// GetCallerIdentity invokes the sts.GetCallerIdentity API synchronously +// api document: https://help.aliyun.com/api/sts/getcalleridentity.html +func (client *Client) GetCallerIdentity(request *GetCallerIdentityRequest) (response *GetCallerIdentityResponse, err error) { + response = CreateGetCallerIdentityResponse() + err = client.DoAction(request, response) + return +} + +// GetCallerIdentityWithChan invokes the sts.GetCallerIdentity API asynchronously +// api document: https://help.aliyun.com/api/sts/getcalleridentity.html +// asynchronous document: https://help.aliyun.com/document_detail/66220.html +func (client *Client) GetCallerIdentityWithChan(request *GetCallerIdentityRequest) (<-chan *GetCallerIdentityResponse, <-chan error) { + responseChan := make(chan *GetCallerIdentityResponse, 1) + errChan := make(chan error, 1) + err := client.AddAsyncTask(func() { + defer close(responseChan) + defer close(errChan) + response, err := client.GetCallerIdentity(request) + if err != nil { + errChan <- err + } else { + responseChan <- response + } + }) + if err != nil { + errChan <- err + close(responseChan) + close(errChan) + } + return responseChan, errChan +} + +// GetCallerIdentityWithCallback invokes the sts.GetCallerIdentity API asynchronously +// api document: https://help.aliyun.com/api/sts/getcalleridentity.html +// asynchronous document: https://help.aliyun.com/document_detail/66220.html +func (client *Client) GetCallerIdentityWithCallback(request *GetCallerIdentityRequest, callback func(response *GetCallerIdentityResponse, err error)) <-chan int { + result := make(chan int, 1) + err := client.AddAsyncTask(func() { + var response *GetCallerIdentityResponse + var err error + defer close(result) + response, err = client.GetCallerIdentity(request) + callback(response, err) + result <- 1 + }) + if err != nil { + defer close(result) + callback(nil, err) + result <- 0 + } + return result +} + +// GetCallerIdentityRequest is the request struct for api GetCallerIdentity +type GetCallerIdentityRequest struct { + *requests.RpcRequest +} + +// GetCallerIdentityResponse is the response struct for api GetCallerIdentity +type GetCallerIdentityResponse struct { + *responses.BaseResponse + AccountId string `json:"AccountId" xml:"AccountId"` + UserId string `json:"UserId" xml:"UserId"` + RoleId string `json:"RoleId" xml:"RoleId"` + Arn string `json:"Arn" xml:"Arn"` + IdentityType string `json:"IdentityType" xml:"IdentityType"` + PrincipalId string `json:"PrincipalId" xml:"PrincipalId"` + RequestId string `json:"RequestId" xml:"RequestId"` +} + +// CreateGetCallerIdentityRequest creates a request to invoke GetCallerIdentity API +func CreateGetCallerIdentityRequest() (request *GetCallerIdentityRequest) { + request = &GetCallerIdentityRequest{ + RpcRequest: &requests.RpcRequest{}, + } + request.InitWithApiInfo("Sts", "2015-04-01", "GetCallerIdentity", "", "") + return +} + +// CreateGetCallerIdentityResponse creates a response to parse from GetCallerIdentity response +func CreateGetCallerIdentityResponse() (response *GetCallerIdentityResponse) { + response = &GetCallerIdentityResponse{ + BaseResponse: &responses.BaseResponse{}, + } + return +} diff --git a/vendor/github.com/aliyun/alibaba-cloud-sdk-go/services/sts/struct_assumed_role_user.go b/vendor/github.com/aliyun/alibaba-cloud-sdk-go/services/sts/struct_assumed_role_user.go new file mode 100644 index 
0000000000..37107b6ecb --- /dev/null +++ b/vendor/github.com/aliyun/alibaba-cloud-sdk-go/services/sts/struct_assumed_role_user.go @@ -0,0 +1,22 @@ +package sts + +//Licensed under the Apache License, Version 2.0 (the "License"); +//you may not use this file except in compliance with the License. +//You may obtain a copy of the License at +// +//http://www.apache.org/licenses/LICENSE-2.0 +// +//Unless required by applicable law or agreed to in writing, software +//distributed under the License is distributed on an "AS IS" BASIS, +//WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +//See the License for the specific language governing permissions and +//limitations under the License. +// +// Code generated by Alibaba Cloud SDK Code Generator. +// Changes may cause incorrect behavior and will be lost if the code is regenerated. + +// AssumedRoleUser is a nested struct in sts response +type AssumedRoleUser struct { + Arn string `json:"Arn" xml:"Arn"` + AssumedRoleId string `json:"AssumedRoleId" xml:"AssumedRoleId"` +} diff --git a/vendor/github.com/aliyun/alibaba-cloud-sdk-go/services/sts/struct_credentials.go b/vendor/github.com/aliyun/alibaba-cloud-sdk-go/services/sts/struct_credentials.go new file mode 100644 index 0000000000..24d7a2abfc --- /dev/null +++ b/vendor/github.com/aliyun/alibaba-cloud-sdk-go/services/sts/struct_credentials.go @@ -0,0 +1,24 @@ +package sts + +//Licensed under the Apache License, Version 2.0 (the "License"); +//you may not use this file except in compliance with the License. +//You may obtain a copy of the License at +// +//http://www.apache.org/licenses/LICENSE-2.0 +// +//Unless required by applicable law or agreed to in writing, software +//distributed under the License is distributed on an "AS IS" BASIS, +//WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +//See the License for the specific language governing permissions and +//limitations under the License. +// +// Code generated by Alibaba Cloud SDK Code Generator. +// Changes may cause incorrect behavior and will be lost if the code is regenerated. + +// Credentials is a nested struct in sts response +type Credentials struct { + SecurityToken string `json:"SecurityToken" xml:"SecurityToken"` + AccessKeySecret string `json:"AccessKeySecret" xml:"AccessKeySecret"` + AccessKeyId string `json:"AccessKeyId" xml:"AccessKeyId"` + Expiration string `json:"Expiration" xml:"Expiration"` +} diff --git a/vendor/github.com/aliyun/alibaba-cloud-sdk-go/services/sts/struct_session_access_key.go b/vendor/github.com/aliyun/alibaba-cloud-sdk-go/services/sts/struct_session_access_key.go new file mode 100644 index 0000000000..cda251a317 --- /dev/null +++ b/vendor/github.com/aliyun/alibaba-cloud-sdk-go/services/sts/struct_session_access_key.go @@ -0,0 +1,23 @@ +package sts + +//Licensed under the Apache License, Version 2.0 (the "License"); +//you may not use this file except in compliance with the License. +//You may obtain a copy of the License at +// +//http://www.apache.org/licenses/LICENSE-2.0 +// +//Unless required by applicable law or agreed to in writing, software +//distributed under the License is distributed on an "AS IS" BASIS, +//WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +//See the License for the specific language governing permissions and +//limitations under the License. +// +// Code generated by Alibaba Cloud SDK Code Generator. +// Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+ +// SessionAccessKey is a nested struct in sts response +type SessionAccessKey struct { + SessionAccessKeyId string `json:"SessionAccessKeyId" xml:"SessionAccessKeyId"` + SessionAccessKeySecret string `json:"SessionAccessKeySecret" xml:"SessionAccessKeySecret"` + Expiration string `json:"Expiration" xml:"Expiration"` +} diff --git a/vendor/github.com/hashicorp/vault-plugin-auth-alicloud/Gopkg.lock b/vendor/github.com/hashicorp/vault-plugin-auth-alicloud/Gopkg.lock new file mode 100644 index 0000000000..252535fee9 --- /dev/null +++ b/vendor/github.com/hashicorp/vault-plugin-auth-alicloud/Gopkg.lock @@ -0,0 +1,409 @@ +# This file is autogenerated, do not edit; changes may be undone by the next 'dep ensure'. + + +[[projects]] + branch = "master" + digest = "1:ca434ab0bb896342ec8d84471d5ed9b70ac55fabf7ea365d352a39e4467e398e" + name = "github.com/SermoDigital/jose" + packages = [ + ".", + "crypto", + "jws", + "jwt", + ] + pruneopts = "UT" + revision = "803625baeddc3526d01d321b5066029f53eafc81" + +[[projects]] + digest = "1:cd50dd629977bc135b5826a4e16c95964db21bb9fe90e49d87ad0b8a720d0666" + name = "github.com/aliyun/alibaba-cloud-sdk-go" + packages = [ + "sdk", + "sdk/auth", + "sdk/auth/credentials", + "sdk/auth/signers", + "sdk/endpoints", + "sdk/errors", + "sdk/requests", + "sdk/responses", + "sdk/utils", + "services/sts", + ] + pruneopts = "UT" + revision = "59d358a6968627414637026239c97bd8444d056b" + version = "1.23.0" + +[[projects]] + branch = "master" + digest = "1:9fd3a6ab34bb103ba228eefd044d3f9aa476237ea95a46d12e8cccd3abf3fea2" + name = "github.com/armon/go-radix" + packages = ["."] + pruneopts = "UT" + revision = "1fca145dffbcaa8fe914309b1ec0cfc67500fe61" + +[[projects]] + digest = "1:d0ea933d11b365b09866da8f0bfacd9172e961430b5102fe854713faff1f3636" + name = "github.com/golang/protobuf" + packages = [ + "proto", + "ptypes", + "ptypes/any", + "ptypes/duration", + "ptypes/timestamp", + ] + pruneopts = "UT" + revision = "3a3da3a4e26776cc22a79ef46d5d58477532dede" + +[[projects]] + digest = "1:29a5ab9fa9e845fd8e8726f31b187d710afd271ef1eb32085fe3d604b7e06382" + name = "github.com/golang/snappy" + packages = ["."] + pruneopts = "UT" + revision = "553a641470496b2327abcac10b36396bd98e45c9" + +[[projects]] + digest = "1:07671f8997086ed115824d1974507d2b147d1e0463675ea5dbf3be89b1c2c563" + name = "github.com/hashicorp/errwrap" + packages = ["."] + pruneopts = "UT" + revision = "7554cd9344cec97297fa6649b055a8c98c2a1e55" + +[[projects]] + branch = "master" + digest = "1:77cb3be9b21ba7f1a4701e870c84ea8b66e7d74c7c8951c58155fdadae9414ec" + name = "github.com/hashicorp/go-cleanhttp" + packages = ["."] + pruneopts = "UT" + revision = "d5fe4b57a186c716b0e00b8c301cbd9b4182694d" + +[[projects]] + digest = "1:39e3c6a08d969bd30057f542ef0bdc08db97c04e708a61bb0a774b320a7296b2" + name = "github.com/hashicorp/go-hclog" + packages = ["."] + pruneopts = "UT" + revision = "69ff559dc25f3b435631604f573a5fa1efdb6433" + +[[projects]] + branch = "master" + digest = "1:2394f5a25132b3868eff44599cc28d44bdd0330806e34c495d754dd052df612b" + name = "github.com/hashicorp/go-immutable-radix" + packages = ["."] + pruneopts = "UT" + revision = "7f3cd4390caab3250a57f30efdb2a65dd7649ecf" + +[[projects]] + digest = "1:e5048c5da80697be2fcdecc944e29d2999e01fd7f48b643168443209779f3463" + name = "github.com/hashicorp/go-multierror" + packages = ["."] + pruneopts = "UT" + revision = "b7773ae218740a7be65057fc60b366a49b538a44" + +[[projects]] + branch = "master" + digest = "1:82320f8469d1524df337bc315a38c87644765cd89ec4cf3cbda249a3acdde671" 
+ name = "github.com/hashicorp/go-plugin" + packages = ["."] + pruneopts = "UT" + revision = "e8d22c780116115ae5624720c9af0c97afe4f551" + +[[projects]] + digest = "1:27d0d41550b480f5a217f6ad9f82491248d092e9cea775845d8784e0fc7c7d1b" + name = "github.com/hashicorp/go-retryablehttp" + packages = ["."] + pruneopts = "UT" + revision = "3b087ef2d313afe6c55b2f511d20db04ca767075" + +[[projects]] + branch = "master" + digest = "1:35e9b9d8a799b6d4d4196f19cba3b0ffabf3c96b43eaedf388263d033c066616" + name = "github.com/hashicorp/go-rootcerts" + packages = ["."] + pruneopts = "UT" + revision = "6bb64b370b90e7ef1fa532be9e591a81c3493e00" + +[[projects]] + branch = "master" + digest = "1:14f2005c31ddf99c4a0f36fc440f8d1ac43224194c7c4a904b3c8f4ba5654d0b" + name = "github.com/hashicorp/go-sockaddr" + packages = ["."] + pruneopts = "UT" + revision = "6d291a969b86c4b633730bfc6b8b9d64c3aafed9" + +[[projects]] + digest = "1:b8edba99399cf7b4140dd74c9b942ab65155f02f08cccf2cf8246eb692bc5d4b" + name = "github.com/hashicorp/go-uuid" + packages = ["."] + pruneopts = "UT" + revision = "64130c7a86d732268a38cb04cfbaf0cc987fda98" + +[[projects]] + digest = "1:e4d5ee2b25927d97e077a7f9ec3f78a72d1385e36e0152b272f4834ba94e1e78" + name = "github.com/hashicorp/go-version" + packages = ["."] + pruneopts = "UT" + revision = "4fe82ae3040f80a03d04d2cccb5606a626b8e1ee" + +[[projects]] + branch = "master" + digest = "1:cf296baa185baae04a9a7004efee8511d08e2f5f51d4cbe5375da89722d681db" + name = "github.com/hashicorp/golang-lru" + packages = [ + ".", + "simplelru", + ] + pruneopts = "UT" + revision = "0fb14efe8c47ae851c0034ed7a448854d3d34cf3" + +[[projects]] + digest = "1:cfa3bd04b9f8d191ac98778cae3bf869e624b0e19c4d80d34d8eb98e4cc5de52" + name = "github.com/hashicorp/hcl" + packages = [ + ".", + "hcl/ast", + "hcl/parser", + "hcl/scanner", + "hcl/strconv", + "hcl/token", + "json/parser", + "json/scanner", + "json/token", + ] + pruneopts = "UT" + revision = "f40e974e75af4e271d97ce0fc917af5898ae7bda" + +[[projects]] + branch = "master" + digest = "1:3d008f995b8f2e49668ac7d41bb3d4caf6146d55319147757a1e59751a0cd932" + name = "github.com/hashicorp/vault" + packages = [ + "api", + "helper/certutil", + "helper/cidrutil", + "helper/compressutil", + "helper/consts", + "helper/errutil", + "helper/hclutil", + "helper/jsonutil", + "helper/locksutil", + "helper/logging", + "helper/mlock", + "helper/parseutil", + "helper/pathmanager", + "helper/pluginutil", + "helper/salt", + "helper/strutil", + "helper/wrapping", + "logical", + "logical/framework", + "logical/plugin", + "logical/plugin/pb", + "physical", + "physical/inmem", + "version", + ] + pruneopts = "UT" + revision = "13a2bb152e228bea04c88bdeefa3f289c208fcf2" + +[[projects]] + branch = "master" + digest = "1:89658943622e6bc5e76b4da027ee9583fa0b321db0c797bd554edab96c1ca2b1" + name = "github.com/hashicorp/yamux" + packages = ["."] + pruneopts = "UT" + revision = "3520598351bb3500a49ae9563f5539666ae0a27c" + +[[projects]] + digest = "1:b87714e57a511d88f307aba7d5b63522da12bed0a050889c81272fc50f71100e" + name = "github.com/jmespath/go-jmespath" + packages = ["."] + pruneopts = "UT" + revision = "3433f3ea46d9f8019119e7dd41274e112a2359a9" + version = "0.2.2" + +[[projects]] + digest = "1:eaefc85d32c03e5f0c2b88ea2f79fce3d993e2c78316d21319575dd4ea9153ca" + name = "github.com/json-iterator/go" + packages = ["."] + pruneopts = "UT" + revision = "ab8a2e0c74be9d3be70b3184d9acc634935ded82" + version = "1.1.4" + +[[projects]] + branch = "master" + digest = 
"1:8eb17c2ec4df79193ae65b621cd1c0c4697db3bc317fe6afdc76d7f2746abd05" + name = "github.com/mitchellh/go-homedir" + packages = ["."] + pruneopts = "UT" + revision = "3864e76763d94a6df2f9960b16a20a33da9f9a66" + +[[projects]] + branch = "master" + digest = "1:cae1afe858922bd10e9573b87130f730a6e4183a00eba79920d6656629468bfa" + name = "github.com/mitchellh/go-testing-interface" + packages = ["."] + pruneopts = "UT" + revision = "a61a99592b77c9ba629d254a693acffaeb4b7e28" + +[[projects]] + digest = "1:ab9cfaf00fc5db5fd9d8e5f33da52e62bcc977d1976503dcc2a1492f391bd9ed" + name = "github.com/mitchellh/mapstructure" + packages = ["."] + pruneopts = "UT" + revision = "a4e142e9c047c904fa2f1e144d9a84e6133024bc" + +[[projects]] + digest = "1:33422d238f147d247752996a26574ac48dcf472976eda7f5134015f06bf16563" + name = "github.com/modern-go/concurrent" + packages = ["."] + pruneopts = "UT" + revision = "bacd9c7ef1dd9b15be4a9909b8ac7a4e313eec94" + version = "1.0.3" + +[[projects]] + digest = "1:e32bdbdb7c377a07a9a46378290059822efdce5c8d96fe71940d87cb4f918855" + name = "github.com/modern-go/reflect2" + packages = ["."] + pruneopts = "UT" + revision = "4b7aa43c6742a2c18fdef89dd197aaae7dac7ccd" + version = "1.0.1" + +[[projects]] + digest = "1:9ec6cf1df5ad1d55cf41a43b6b1e7e118a91bade4f68ff4303379343e40c0e25" + name = "github.com/oklog/run" + packages = ["."] + pruneopts = "UT" + revision = "4dadeb3030eda0273a12382bb2348ffc7c9d1a39" + version = "v1.0.0" + +[[projects]] + branch = "master" + digest = "1:5b92d232e81c3e8eec282c92dcaa2e0e1ad3c23157be19a01b3e33f7e6e8d137" + name = "github.com/ryanuber/go-glob" + packages = ["."] + pruneopts = "UT" + revision = "256dc444b735e061061cf46c809487313d5b0065" + +[[projects]] + digest = "1:274f67cb6fed9588ea2521ecdac05a6d62a8c51c074c1fccc6a49a40ba80e925" + name = "github.com/satori/go.uuid" + packages = ["."] + pruneopts = "UT" + revision = "f58768cc1a7a7e77a3bd49e98cdd21419399b6a3" + version = "v1.2.0" + +[[projects]] + digest = "1:7d68173c25f7e56987596700421ae9cd87d6f7dc745e0a1de5dae37683611acb" + name = "golang.org/x/net" + packages = [ + "context", + "http2", + "http2/hpack", + "idna", + "internal/timeseries", + "lex/httplex", + "trace", + ] + pruneopts = "UT" + revision = "f5dfe339be1d06f81b22525fe34671ee7d2c8904" + +[[projects]] + digest = "1:ba7d5e85e8b4f084fae02a1a9d7462980e889d1eb689c747507b30a30b8bfa67" + name = "golang.org/x/sys" + packages = ["unix"] + pruneopts = "UT" + revision = "37707fdb30a5b38865cfb95e5aab41707daec7fd" + +[[projects]] + digest = "1:7509ba4347d1f8de6ae9be8818b0cd1abc3deeffe28aeaf4be6d4b6b5178d9ca" + name = "golang.org/x/text" + packages = [ + "collate", + "collate/build", + "internal/colltab", + "internal/gen", + "internal/tag", + "internal/triegen", + "internal/ucd", + "language", + "secure/bidirule", + "transform", + "unicode/bidi", + "unicode/cldr", + "unicode/norm", + "unicode/rangetable", + ] + pruneopts = "UT" + revision = "f21a4dfb5e38f5895301dc265a8def02365cc3d0" + version = "v0.3.0" + +[[projects]] + branch = "master" + digest = "1:c9e7a4b4d47c0ed205d257648b0e5b0440880cb728506e318f8ac7cd36270bc4" + name = "golang.org/x/time" + packages = ["rate"] + pruneopts = "UT" + revision = "fbb02b2291d28baffd63558aa44b4b56f178d650" + +[[projects]] + branch = "master" + digest = "1:601e63e7d4577f907118bec825902505291918859d223bce015539e79f1160e3" + name = "google.golang.org/genproto" + packages = ["googleapis/rpc/status"] + pruneopts = "UT" + revision = "02b4e95473316948020af0b7a4f0f22c73929b0e" + +[[projects]] + digest = 
"1:3f0daa1176772835d89c92fe1153eb007cf70671e81ace72335ad3f9c224c837" + name = "google.golang.org/grpc" + packages = [ + ".", + "balancer", + "balancer/base", + "balancer/roundrobin", + "codes", + "connectivity", + "credentials", + "encoding", + "encoding/proto", + "grpclb/grpc_lb_v1/messages", + "grpclog", + "health", + "health/grpc_health_v1", + "internal", + "keepalive", + "metadata", + "naming", + "peer", + "resolver", + "resolver/dns", + "resolver/passthrough", + "stats", + "status", + "tap", + "transport", + ] + pruneopts = "UT" + revision = "d50734d1d6ca477a72646f3022216ec39639f4cd" + +[solve-meta] + analyzer-name = "dep" + analyzer-version = 1 + input-imports = [ + "github.com/aliyun/alibaba-cloud-sdk-go/sdk", + "github.com/aliyun/alibaba-cloud-sdk-go/sdk/auth/credentials", + "github.com/aliyun/alibaba-cloud-sdk-go/sdk/endpoints", + "github.com/aliyun/alibaba-cloud-sdk-go/services/sts", + "github.com/hashicorp/errwrap", + "github.com/hashicorp/go-cleanhttp", + "github.com/hashicorp/go-hclog", + "github.com/hashicorp/go-sockaddr", + "github.com/hashicorp/vault/api", + "github.com/hashicorp/vault/helper/cidrutil", + "github.com/hashicorp/vault/helper/parseutil", + "github.com/hashicorp/vault/helper/pluginutil", + "github.com/hashicorp/vault/logical", + "github.com/hashicorp/vault/logical/framework", + "github.com/hashicorp/vault/logical/plugin", + ] + solver-name = "gps-cdcl" + solver-version = 1 diff --git a/vendor/github.com/hashicorp/vault-plugin-auth-alicloud/Gopkg.toml b/vendor/github.com/hashicorp/vault-plugin-auth-alicloud/Gopkg.toml new file mode 100644 index 0000000000..26519c80fc --- /dev/null +++ b/vendor/github.com/hashicorp/vault-plugin-auth-alicloud/Gopkg.toml @@ -0,0 +1,46 @@ +# Gopkg.toml example +# +# Refer to https://golang.github.io/dep/docs/Gopkg.toml.html +# for detailed Gopkg.toml documentation. +# +# required = ["github.com/user/thing/cmd/thing"] +# ignored = ["github.com/user/project/pkgX", "bitbucket.org/user/project/pkgA/pkgY"] +# +# [[constraint]] +# name = "github.com/user/project" +# version = "1.0.0" +# +# [[constraint]] +# name = "github.com/user/project2" +# branch = "dev" +# source = "github.com/myfork/project2" +# +# [[override]] +# name = "github.com/x/y" +# version = "2.4.0" +# +# [prune] +# non-go = false +# go-tests = true +# unused-packages = true + + +[[constraint]] + name = "github.com/aliyun/alibaba-cloud-sdk-go" + version = "1.23.0" + +[[constraint]] + branch = "master" + name = "github.com/hashicorp/go-cleanhttp" + +[[constraint]] + branch = "master" + name = "github.com/hashicorp/go-sockaddr" + +[[constraint]] + branch = "master" + name = "github.com/hashicorp/vault" + +[prune] + go-tests = true + unused-packages = true diff --git a/vendor/github.com/hashicorp/vault-plugin-auth-alicloud/LICENSE b/vendor/github.com/hashicorp/vault-plugin-auth-alicloud/LICENSE new file mode 100644 index 0000000000..a612ad9813 --- /dev/null +++ b/vendor/github.com/hashicorp/vault-plugin-auth-alicloud/LICENSE @@ -0,0 +1,373 @@ +Mozilla Public License Version 2.0 +================================== + +1. Definitions +-------------- + +1.1. "Contributor" + means each individual or legal entity that creates, contributes to + the creation of, or owns Covered Software. + +1.2. "Contributor Version" + means the combination of the Contributions of others (if any) used + by a Contributor and that particular Contributor's Contribution. + +1.3. "Contribution" + means Covered Software of a particular Contributor. + +1.4. 
"Covered Software" + means Source Code Form to which the initial Contributor has attached + the notice in Exhibit A, the Executable Form of such Source Code + Form, and Modifications of such Source Code Form, in each case + including portions thereof. + +1.5. "Incompatible With Secondary Licenses" + means + + (a) that the initial Contributor has attached the notice described + in Exhibit B to the Covered Software; or + + (b) that the Covered Software was made available under the terms of + version 1.1 or earlier of the License, but not also under the + terms of a Secondary License. + +1.6. "Executable Form" + means any form of the work other than Source Code Form. + +1.7. "Larger Work" + means a work that combines Covered Software with other material, in + a separate file or files, that is not Covered Software. + +1.8. "License" + means this document. + +1.9. "Licensable" + means having the right to grant, to the maximum extent possible, + whether at the time of the initial grant or subsequently, any and + all of the rights conveyed by this License. + +1.10. "Modifications" + means any of the following: + + (a) any file in Source Code Form that results from an addition to, + deletion from, or modification of the contents of Covered + Software; or + + (b) any new file in Source Code Form that contains any Covered + Software. + +1.11. "Patent Claims" of a Contributor + means any patent claim(s), including without limitation, method, + process, and apparatus claims, in any patent Licensable by such + Contributor that would be infringed, but for the grant of the + License, by the making, using, selling, offering for sale, having + made, import, or transfer of either its Contributions or its + Contributor Version. + +1.12. "Secondary License" + means either the GNU General Public License, Version 2.0, the GNU + Lesser General Public License, Version 2.1, the GNU Affero General + Public License, Version 3.0, or any later versions of those + licenses. + +1.13. "Source Code Form" + means the form of the work preferred for making modifications. + +1.14. "You" (or "Your") + means an individual or a legal entity exercising rights under this + License. For legal entities, "You" includes any entity that + controls, is controlled by, or is under common control with You. For + purposes of this definition, "control" means (a) the power, direct + or indirect, to cause the direction or management of such entity, + whether by contract or otherwise, or (b) ownership of more than + fifty percent (50%) of the outstanding shares or beneficial + ownership of such entity. + +2. License Grants and Conditions +-------------------------------- + +2.1. Grants + +Each Contributor hereby grants You a world-wide, royalty-free, +non-exclusive license: + +(a) under intellectual property rights (other than patent or trademark) + Licensable by such Contributor to use, reproduce, make available, + modify, display, perform, distribute, and otherwise exploit its + Contributions, either on an unmodified basis, with Modifications, or + as part of a Larger Work; and + +(b) under Patent Claims of such Contributor to make, use, sell, offer + for sale, have made, import, and otherwise transfer either its + Contributions or its Contributor Version. + +2.2. Effective Date + +The licenses granted in Section 2.1 with respect to any Contribution +become effective for each Contribution on the date the Contributor first +distributes such Contribution. + +2.3. 
Limitations on Grant Scope + +The licenses granted in this Section 2 are the only rights granted under +this License. No additional rights or licenses will be implied from the +distribution or licensing of Covered Software under this License. +Notwithstanding Section 2.1(b) above, no patent license is granted by a +Contributor: + +(a) for any code that a Contributor has removed from Covered Software; + or + +(b) for infringements caused by: (i) Your and any other third party's + modifications of Covered Software, or (ii) the combination of its + Contributions with other software (except as part of its Contributor + Version); or + +(c) under Patent Claims infringed by Covered Software in the absence of + its Contributions. + +This License does not grant any rights in the trademarks, service marks, +or logos of any Contributor (except as may be necessary to comply with +the notice requirements in Section 3.4). + +2.4. Subsequent Licenses + +No Contributor makes additional grants as a result of Your choice to +distribute the Covered Software under a subsequent version of this +License (see Section 10.2) or under the terms of a Secondary License (if +permitted under the terms of Section 3.3). + +2.5. Representation + +Each Contributor represents that the Contributor believes its +Contributions are its original creation(s) or it has sufficient rights +to grant the rights to its Contributions conveyed by this License. + +2.6. Fair Use + +This License is not intended to limit any rights You have under +applicable copyright doctrines of fair use, fair dealing, or other +equivalents. + +2.7. Conditions + +Sections 3.1, 3.2, 3.3, and 3.4 are conditions of the licenses granted +in Section 2.1. + +3. Responsibilities +------------------- + +3.1. Distribution of Source Form + +All distribution of Covered Software in Source Code Form, including any +Modifications that You create or to which You contribute, must be under +the terms of this License. You must inform recipients that the Source +Code Form of the Covered Software is governed by the terms of this +License, and how they can obtain a copy of this License. You may not +attempt to alter or restrict the recipients' rights in the Source Code +Form. + +3.2. Distribution of Executable Form + +If You distribute Covered Software in Executable Form then: + +(a) such Covered Software must also be made available in Source Code + Form, as described in Section 3.1, and You must inform recipients of + the Executable Form how they can obtain a copy of such Source Code + Form by reasonable means in a timely manner, at a charge no more + than the cost of distribution to the recipient; and + +(b) You may distribute such Executable Form under the terms of this + License, or sublicense it under different terms, provided that the + license for the Executable Form does not attempt to limit or alter + the recipients' rights in the Source Code Form under this License. + +3.3. Distribution of a Larger Work + +You may create and distribute a Larger Work under terms of Your choice, +provided that You also comply with the requirements of this License for +the Covered Software. 
If the Larger Work is a combination of Covered +Software with a work governed by one or more Secondary Licenses, and the +Covered Software is not Incompatible With Secondary Licenses, this +License permits You to additionally distribute such Covered Software +under the terms of such Secondary License(s), so that the recipient of +the Larger Work may, at their option, further distribute the Covered +Software under the terms of either this License or such Secondary +License(s). + +3.4. Notices + +You may not remove or alter the substance of any license notices +(including copyright notices, patent notices, disclaimers of warranty, +or limitations of liability) contained within the Source Code Form of +the Covered Software, except that You may alter any license notices to +the extent required to remedy known factual inaccuracies. + +3.5. Application of Additional Terms + +You may choose to offer, and to charge a fee for, warranty, support, +indemnity or liability obligations to one or more recipients of Covered +Software. However, You may do so only on Your own behalf, and not on +behalf of any Contributor. You must make it absolutely clear that any +such warranty, support, indemnity, or liability obligation is offered by +You alone, and You hereby agree to indemnify every Contributor for any +liability incurred by such Contributor as a result of warranty, support, +indemnity or liability terms You offer. You may include additional +disclaimers of warranty and limitations of liability specific to any +jurisdiction. + +4. Inability to Comply Due to Statute or Regulation +--------------------------------------------------- + +If it is impossible for You to comply with any of the terms of this +License with respect to some or all of the Covered Software due to +statute, judicial order, or regulation then You must: (a) comply with +the terms of this License to the maximum extent possible; and (b) +describe the limitations and the code they affect. Such description must +be placed in a text file included with all distributions of the Covered +Software under this License. Except to the extent prohibited by statute +or regulation, such description must be sufficiently detailed for a +recipient of ordinary skill to be able to understand it. + +5. Termination +-------------- + +5.1. The rights granted under this License will terminate automatically +if You fail to comply with any of its terms. However, if You become +compliant, then the rights granted under this License from a particular +Contributor are reinstated (a) provisionally, unless and until such +Contributor explicitly and finally terminates Your grants, and (b) on an +ongoing basis, if such Contributor fails to notify You of the +non-compliance by some reasonable means prior to 60 days after You have +come back into compliance. Moreover, Your grants from a particular +Contributor are reinstated on an ongoing basis if such Contributor +notifies You of the non-compliance by some reasonable means, this is the +first time You have received notice of non-compliance with this License +from such Contributor, and You become compliant prior to 30 days after +Your receipt of the notice. + +5.2. 
If You initiate litigation against any entity by asserting a patent +infringement claim (excluding declaratory judgment actions, +counter-claims, and cross-claims) alleging that a Contributor Version +directly or indirectly infringes any patent, then the rights granted to +You by any and all Contributors for the Covered Software under Section +2.1 of this License shall terminate. + +5.3. In the event of termination under Sections 5.1 or 5.2 above, all +end user license agreements (excluding distributors and resellers) which +have been validly granted by You or Your distributors under this License +prior to termination shall survive termination. + +************************************************************************ +* * +* 6. Disclaimer of Warranty * +* ------------------------- * +* * +* Covered Software is provided under this License on an "as is" * +* basis, without warranty of any kind, either expressed, implied, or * +* statutory, including, without limitation, warranties that the * +* Covered Software is free of defects, merchantable, fit for a * +* particular purpose or non-infringing. The entire risk as to the * +* quality and performance of the Covered Software is with You. * +* Should any Covered Software prove defective in any respect, You * +* (not any Contributor) assume the cost of any necessary servicing, * +* repair, or correction. This disclaimer of warranty constitutes an * +* essential part of this License. No use of any Covered Software is * +* authorized under this License except under this disclaimer. * +* * +************************************************************************ + +************************************************************************ +* * +* 7. Limitation of Liability * +* -------------------------- * +* * +* Under no circumstances and under no legal theory, whether tort * +* (including negligence), contract, or otherwise, shall any * +* Contributor, or anyone who distributes Covered Software as * +* permitted above, be liable to You for any direct, indirect, * +* special, incidental, or consequential damages of any character * +* including, without limitation, damages for lost profits, loss of * +* goodwill, work stoppage, computer failure or malfunction, or any * +* and all other commercial damages or losses, even if such party * +* shall have been informed of the possibility of such damages. This * +* limitation of liability shall not apply to liability for death or * +* personal injury resulting from such party's negligence to the * +* extent applicable law prohibits such limitation. Some * +* jurisdictions do not allow the exclusion or limitation of * +* incidental or consequential damages, so this exclusion and * +* limitation may not apply to You. * +* * +************************************************************************ + +8. Litigation +------------- + +Any litigation relating to this License may be brought only in the +courts of a jurisdiction where the defendant maintains its principal +place of business and such litigation shall be governed by laws of that +jurisdiction, without reference to its conflict-of-law provisions. +Nothing in this Section shall prevent a party's ability to bring +cross-claims or counter-claims. + +9. Miscellaneous +---------------- + +This License represents the complete agreement concerning the subject +matter hereof. If any provision of this License is held to be +unenforceable, such provision shall be reformed only to the extent +necessary to make it enforceable. 
Any law or regulation which provides +that the language of a contract shall be construed against the drafter +shall not be used to construe this License against a Contributor. + +10. Versions of the License +--------------------------- + +10.1. New Versions + +Mozilla Foundation is the license steward. Except as provided in Section +10.3, no one other than the license steward has the right to modify or +publish new versions of this License. Each version will be given a +distinguishing version number. + +10.2. Effect of New Versions + +You may distribute the Covered Software under the terms of the version +of the License under which You originally received the Covered Software, +or under the terms of any subsequent version published by the license +steward. + +10.3. Modified Versions + +If you create software not governed by this License, and you want to +create a new license for such software, you may create and use a +modified version of this License if you rename the license and remove +any references to the name of the license steward (except to note that +such modified license differs from this License). + +10.4. Distributing Source Code Form that is Incompatible With Secondary +Licenses + +If You choose to distribute Source Code Form that is Incompatible With +Secondary Licenses under the terms of this version of the License, the +notice described in Exhibit B of this License must be attached. + +Exhibit A - Source Code Form License Notice +------------------------------------------- + + This Source Code Form is subject to the terms of the Mozilla Public + License, v. 2.0. If a copy of the MPL was not distributed with this + file, You can obtain one at http://mozilla.org/MPL/2.0/. + +If it is not possible or desirable to put the notice in a particular +file, then You may include the notice in a location (such as a LICENSE +file in a relevant directory) where a recipient would be likely to look +for such a notice. + +You may add additional accurate notices of copyright ownership. + +Exhibit B - "Incompatible With Secondary Licenses" Notice +--------------------------------------------------------- + + This Source Code Form is "Incompatible With Secondary Licenses", as + defined by the Mozilla Public License, v. 2.0. diff --git a/vendor/github.com/hashicorp/vault-plugin-auth-alicloud/Makefile b/vendor/github.com/hashicorp/vault-plugin-auth-alicloud/Makefile new file mode 100644 index 0000000000..cf83a4db5a --- /dev/null +++ b/vendor/github.com/hashicorp/vault-plugin-auth-alicloud/Makefile @@ -0,0 +1,55 @@ +TOOL?=vault-plugin-auth-alibaba +TEST?=$$(go list ./... | grep -v /vendor/ | grep -v teamcity) +VETARGS?=-asmdecl -atomic -bool -buildtags -copylocks -methods -nilfunc -printf -rangeloops -shift -structtags -unsafeptr +EXTERNAL_TOOLS=\ + github.com/mitchellh/gox \ + github.com/golang/dep/cmd/dep +BUILD_TAGS?=${TOOL} +GOFMT_FILES?=$$(find . -name '*.go' | grep -v vendor) + +# bin generates the releaseable binaries for this plugin +bin: fmtcheck generate + @CGO_ENABLED=0 BUILD_TAGS='$(BUILD_TAGS)' sh -c "'$(CURDIR)/scripts/build.sh'" + +default: dev + +# dev creates binaries for testing Vault locally. These are put +# into ./bin/ as well as $GOPATH/bin. 
+dev: fmtcheck generate + @CGO_ENABLED=0 BUILD_TAGS='$(BUILD_TAGS)' VAULT_DEV_BUILD=1 sh -c "'$(CURDIR)/scripts/build.sh'" + +# testshort runs the quick unit tests and vets the code +testshort: fmtcheck generate + CGO_ENABLED=0 VAULT_TOKEN= VAULT_ACC= go test -short -tags='$(BUILD_TAGS)' $(TEST) $(TESTARGS) -count=1 -timeout=20m -parallel=4 + +# test runs the unit tests and vets the code +test: fmtcheck generate + CGO_ENABLED=0 VAULT_TOKEN= VAULT_ACC= go test -v -tags='$(BUILD_TAGS)' $(TEST) $(TESTARGS) -count=1 -timeout=20m -parallel=4 + +testcompile: fmtcheck generate + @for pkg in $(TEST) ; do \ + go test -v -c -tags='$(BUILD_TAGS)' $$pkg -parallel=4 ; \ + done + +# generate runs `go generate` to build the dynamically generated +# source files. +generate: + go generate $(go list ./... | grep -v /vendor/) + +# bootstrap the build by downloading additional tools +bootstrap: + @for tool in $(EXTERNAL_TOOLS) ; do \ + echo "Installing/Updating $$tool" ; \ + go get -u $$tool; \ + done + +fmtcheck: + @sh -c "'$(CURDIR)/scripts/gofmtcheck.sh'" + +fmt: + gofmt -w $(GOFMT_FILES) + +proto: + protoc *.proto --go_out=plugins=grpc:. + +.PHONY: bin default generate test vet bootstrap fmt fmtcheck \ No newline at end of file diff --git a/vendor/github.com/hashicorp/vault-plugin-auth-alicloud/README.md b/vendor/github.com/hashicorp/vault-plugin-auth-alicloud/README.md new file mode 100644 index 0000000000..91be81f541 --- /dev/null +++ b/vendor/github.com/hashicorp/vault-plugin-auth-alicloud/README.md @@ -0,0 +1,127 @@ +# Vault Plugin: AliCloud Auth Backend + +This is a standalone backend plugin for use with [Hashicorp Vault](https://www.github.com/hashicorp/vault). +This plugin allows authentication to Vault using Resource Access Management (RAM). + +**Please note**: We take Vault's security and our users' trust very seriously. If you believe you have found a security issue in Vault, _please responsibly disclose_ by contacting us at [security@hashicorp.com](mailto:security@hashicorp.com). + +## Quick Links + - Vault Website: https://www.vaultproject.io + - AliCloud Auth Docs: https://www.vaultproject.io/docs/auth/alicloud.html + - Main Project Github: https://www.github.com/hashicorp/vault + +## Getting Started + +This is a [Vault plugin](https://www.vaultproject.io/docs/internals/plugins.html) +and is meant to work with Vault. This guide assumes you have already installed Vault +and have a basic understanding of how Vault works. + +Otherwise, first read this guide on how to [get started with Vault](https://www.vaultproject.io/intro/getting-started/install.html). + +To learn specifically about how plugins work, see documentation on [Vault plugins](https://www.vaultproject.io/docs/internals/plugins.html). + +## Security Model + +This authentication model places Vault in the middle of a call between a client and AliCloud's "GetCallerIdentity" method. Based on AliCloud's response, it grants an access token based on pre-configured roles. + +## Usage + +Please see [documentation for the plugin](https://www.vaultproject.io/docs/auth/alicloud.html) +on the Vault website. + +This plugin is currently built into Vault and by default is accessed +at `auth/alicloud`. To enable this in a running Vault server: + +```sh +$ vault auth enable alicloud +Successfully enabled 'alicloud' at 'alicloud'! +``` + +To see all the supported paths, see the [AliCloud auth backend docs](https://www.vaultproject.io/docs/auth/alicloud.html). 
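+As a minimal sketch of what a client-side login looks like once the method is
+enabled (the role name `dev-role` and the credential values here are
+placeholders; the role must already have been created under
+`auth/alicloud/role/`):
+
+```sh
+$ vault login -method=alicloud \
+    access_key=... secret_key=... security_token=... region=... \
+    role=dev-role
+```
+
+Under the hood, the CLI handler (see `cli.go` later in this patch) uses these
+values to build a signed `GetCallerIdentity` request that Vault replays
+against AliCloud, as described in the Security Model section above.
+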
+ +## Developing + +If you wish to work on this plugin, you'll first need +[Go](https://www.golang.org) installed on your machine. + +For local dev first make sure Go is properly installed, including +setting up a [GOPATH](https://golang.org/doc/code.html#GOPATH). +Next, clone this repository into +`$GOPATH/src/github.com/hashicorp/vault-plugin-auth-alicloud`. +You can then download any required build tools by bootstrapping your +environment: + +```sh +$ make bootstrap +``` + +To compile a development version of this plugin, run `make` or `make dev`. +This will put the plugin binary in the `bin` and `$GOPATH/bin` folders. `dev` +mode will only generate the binary for your platform and is faster: + +```sh +$ make +$ make dev +``` + +Put the plugin binary into a location of your choice. This directory +will be specified as the [`plugin_directory`](https://www.vaultproject.io/docs/configuration/index.html#plugin_directory) +in the Vault config used to start the server. + +```json +... +plugin_directory = "path/to/plugin/directory" +... +``` + +Start a Vault server with this config file: +```sh +$ vault server -config=path/to/config.json ... +... +``` + +Once the server is started, register the plugin in the Vault server's [plugin catalog](https://www.vaultproject.io/docs/internals/plugins.html#plugin-catalog): + +```sh +$ vault write sys/plugins/catalog/alicloud \ + sha_256= \ + command="vault-plugin-auth-alicloud" +... +Success! Data written to: sys/plugins/catalog/alicloud +``` + +Note you should generate a new sha256 checksum if you have made changes +to the plugin. Example using openssl: + +```sh +openssl dgst -sha256 $GOPATH/vault-plugin-auth-alicloud +... +SHA256(.../go/bin/vault-plugin-auth-alicloud)= 896c13c0f5305daed381952a128322e02bc28a57d0c862a78cbc2ea66e8c6fa1 +``` + +Enable the auth plugin backend using the AliCloud auth plugin: + +```sh +$ vault auth enable -plugin-name='alicloud' plugin +... + +Successfully enabled 'plugin' at 'alicloud'! +``` + +#### Tests + +If you are developing this plugin and want to verify it is still +functioning (and you haven't broken anything else), we recommend +running the tests. 
+ +To run the tests, invoke `make test`: + +```sh +$ make test +``` + +You can also specify a `TESTARGS` variable to filter tests like so: + +```sh +$ make test TESTARGS='--run=TestConfig' +``` \ No newline at end of file diff --git a/vendor/github.com/hashicorp/vault-plugin-auth-alicloud/arn.go b/vendor/github.com/hashicorp/vault-plugin-auth-alicloud/arn.go new file mode 100644 index 0000000000..c728ac9a73 --- /dev/null +++ b/vendor/github.com/hashicorp/vault-plugin-auth-alicloud/arn.go @@ -0,0 +1,94 @@ +package alicloud + +import ( + "errors" + "fmt" + "strings" +) + +func parseARN(a string) (*arn, error) { + if a == "" { + return nil, errors.New("no arn provided") + } + + // Example: "acs:ram::5138828231865461:assumed-role/elk/vm-ram-i-rj978rorvlg76urhqh7q" + parsed := &arn{ + Full: a, + } + outerFields := strings.Split(a, ":") + if len(outerFields) != 5 { + return nil, fmt.Errorf("unrecognized arn: contains %d colon-separated fields, expected 5", len(outerFields)) + } + if outerFields[0] != "acs" { + return nil, errors.New(`unrecognized arn: does not begin with "acs:"`) + } + if outerFields[1] != "ram" { + return nil, fmt.Errorf("unrecognized service: %v, not ram", outerFields[1]) + } + parsed.AccountNumber = outerFields[3] + + roleFields := strings.Split(outerFields[4], "/") + if len(roleFields) < 2 { + return nil, fmt.Errorf("unrecognized arn: %q contains fewer than 2 slash-separated roleFields", outerFields[4]) + } + + entityType := roleFields[0] + switch entityType { + case "assumed-role": + parsed.Type = arnTypeAssumedRole + case "role": + parsed.Type = arnTypeRole + default: + return nil, fmt.Errorf("unsupported parsed type: %s", entityType) + } + + parsed.RoleName = roleFields[1] + if len(roleFields) > 2 { + parsed.RoleAssumerName = roleFields[2] + } + return parsed, nil +} + +type arnType int + +func (t arnType) String() string { + switch t { + case arnTypeRole: + return "role" + case arnTypeAssumedRole: + return "assumed-role" + default: + return "" + } +} + +const ( + arnTypeRole arnType = iota + arnTypeAssumedRole +) + +type arn struct { + AccountNumber string + Type arnType + RoleName string + RoleAssumerName string + Full string +} + +func (a *arn) IsMemberOf(possibleParent *arn) bool { + if possibleParent.Type != arnTypeRole || a.Type != arnTypeAssumedRole { + // We currently only support the relationship between roles and assumed roles. + return false + } + if possibleParent.AccountNumber != a.AccountNumber { + return false + } + if possibleParent.RoleName != a.RoleName { + return false + } + return true +} + +func (a *arn) String() string { + return a.Full +} diff --git a/vendor/github.com/hashicorp/vault-plugin-auth-alicloud/backend.go b/vendor/github.com/hashicorp/vault-plugin-auth-alicloud/backend.go new file mode 100644 index 0000000000..e6811c7dfd --- /dev/null +++ b/vendor/github.com/hashicorp/vault-plugin-auth-alicloud/backend.go @@ -0,0 +1,57 @@ +package alicloud + +import ( + "context" + "net/http" + + "github.com/hashicorp/go-cleanhttp" + "github.com/hashicorp/vault/logical" + "github.com/hashicorp/vault/logical/framework" +) + +func Factory(ctx context.Context, conf *logical.BackendConfig) (logical.Backend, error) { + client := cleanhttp.DefaultClient() + client.CheckRedirect = func(req *http.Request, via []*http.Request) error { + return http.ErrUseLastResponse + } + b := newBackend(client) + if err := b.Setup(ctx, conf); err != nil { + return nil, err + } + return b, nil +} + +// newBackend exists for testability. It allows us to inject a fake client. 
+func newBackend(client *http.Client) *backend { + b := &backend{ + identityClient: client, + } + b.Backend = &framework.Backend{ + AuthRenew: b.pathLoginRenew, + Help: backendHelp, + PathsSpecial: &logical.Paths{ + Unauthenticated: []string{ + "login", + }, + }, + Paths: []*framework.Path{ + pathLogin(b), + pathListRole(b), + pathListRoles(b), + pathRole(b), + }, + BackendType: logical.TypeCredential, + } + return b +} + +type backend struct { + *framework.Backend + + identityClient *http.Client +} + +const backendHelp = ` +That AliCloud RAM auth method allows entities to authenticate based on their +identity and pre-configured roles. +` diff --git a/vendor/github.com/hashicorp/vault-plugin-auth-alicloud/cli.go b/vendor/github.com/hashicorp/vault-plugin-auth-alicloud/cli.go new file mode 100644 index 0000000000..4b9d8c35c0 --- /dev/null +++ b/vendor/github.com/hashicorp/vault-plugin-auth-alicloud/cli.go @@ -0,0 +1,75 @@ +package alicloud + +import ( + "errors" + "fmt" + "strings" + + "github.com/hashicorp/vault-plugin-auth-alicloud/tools" + "github.com/hashicorp/vault/api" +) + +type CLIHandler struct{} + +func (h *CLIHandler) Auth(c *api.Client, m map[string]string) (*api.Secret, error) { + mount, ok := m["mount"] + if !ok { + mount = "alicloud" + } + role := m["role"] + + loginData, err := tools.GenerateLoginData(m["access_key"], m["secret_key"], m["security_token"], m["region"]) + if err != nil { + return nil, err + } + + loginData["role"] = role + path := fmt.Sprintf("auth/%s/login", mount) + + secret, err := c.Logical().Write(path, loginData) + if err != nil { + return nil, err + } + if secret == nil { + return nil, errors.New("empty response from credential provider") + } + return secret, nil +} + +func (h *CLIHandler) Help() string { + help := ` +Usage: vault login -method=alicloud [CONFIG K=V...] + + The AliCloud auth method allows users to authenticate with AliCloud RAM + credentials. + + The AliCloud RAM credentials may be specified explicitly via the command line: + + $ vault login -method=alicloud access_key=... secret_key=... security_token=... region=... + +Configuration: + + access_key= + Explicit AliCloud access key ID + + secret_key= + Explicit AliCloud secret access key + + security_token= + Explicit AliCloud security token + + region= + Explicit AliCloud region + + mount= + Path where the AliCloud credential method is mounted. This is usually provided + via the -path flag in the "vault login" command, but it can be specified + here as well. If specified here, it takes precedence over the value for + -path. The default value is "alicloud". 
+ + role= + Name of the role to request a token against +` + + return strings.TrimSpace(help) +} diff --git a/vendor/github.com/hashicorp/vault-plugin-auth-alicloud/path_login.go b/vendor/github.com/hashicorp/vault-plugin-auth-alicloud/path_login.go new file mode 100644 index 0000000000..3ec7d0b20a --- /dev/null +++ b/vendor/github.com/hashicorp/vault-plugin-auth-alicloud/path_login.go @@ -0,0 +1,248 @@ +package alicloud + +import ( + "context" + "encoding/base64" + "encoding/json" + "errors" + "fmt" + "io/ioutil" + "net/http" + "net/url" + + "github.com/aliyun/alibaba-cloud-sdk-go/sdk/endpoints" + "github.com/aliyun/alibaba-cloud-sdk-go/services/sts" + "github.com/hashicorp/errwrap" + "github.com/hashicorp/vault/helper/cidrutil" + "github.com/hashicorp/vault/logical" + "github.com/hashicorp/vault/logical/framework" +) + +func pathLogin(b *backend) *framework.Path { + return &framework.Path{ + Pattern: "login$", + Fields: map[string]*framework.FieldSchema{ + "role": { + Type: framework.TypeString, + Description: `Name of the role against which the login is being attempted. +If 'role' is not specified, then the login endpoint looks for a role name in the ARN returned by +the GetCallerIdentity request. If a matching role is not found, login fails.`, + }, + "identity_request_url": { + Type: framework.TypeString, + Description: "Base64-encoded full URL against which to make the AliCloud request.", + }, + "identity_request_headers": { + Type: framework.TypeHeader, + Description: `The request headers. This must include the headers over which AliCloud +has included a signature.`, + }, + }, + Callbacks: map[logical.Operation]framework.OperationFunc{ + logical.UpdateOperation: b.pathLoginUpdate, + }, + HelpSynopsis: pathLoginSyn, + HelpDescription: pathLoginDesc, + } +} + +func (b *backend) pathLoginUpdate(ctx context.Context, req *logical.Request, data *framework.FieldData) (*logical.Response, error) { + + b64URL := data.Get("identity_request_url").(string) + if b64URL == "" { + return nil, errors.New("missing identity_request_url") + } + identityReqURL, err := base64.StdEncoding.DecodeString(b64URL) + if err != nil { + return nil, errwrap.Wrapf("failed to base64 decode identity_request_url: {{err}}", err) + } + if _, err := url.Parse(string(identityReqURL)); err != nil { + return nil, errwrap.Wrapf("error parsing identity_request_url: {{err}}", err) + } + + header := data.Get("identity_request_headers").(http.Header) + if len(header) == 0 { + return nil, errors.New("missing identity_request_headers") + } + + callerIdentity, err := b.getCallerIdentity(header, string(identityReqURL)) + if err != nil { + return nil, errwrap.Wrapf("error making upstream request: {{err}}", err) + } + + parsedARN, err := parseARN(callerIdentity.Arn) + if err != nil { + return nil, errwrap.Wrapf(fmt.Sprintf("unable to parse entity's arn %s due to {{err}}", callerIdentity.Arn), err) + } + if parsedARN.Type != arnTypeAssumedRole { + return nil, fmt.Errorf("only %s arn types are supported at this time, but %s was provided", arnTypeAssumedRole, parsedARN.Type) + } + + // If a role name was explicitly provided, use that, but otherwise fall back to using the role + // in the ARN returned by the GetCallerIdentity call. 
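+	// For example, with the assumed-role arn
+	// "acs:ram::5138828231865461:assumed-role/elk/vm-ram-i-rj978rorvlg76urhqh7q",
+	// the fallback role name would be "elk".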
+ roleName := "" + roleNameIfc, ok := data.GetOk("role") + if ok { + roleName = roleNameIfc.(string) + } else { + roleName = parsedARN.RoleName + } + + role, err := readRole(ctx, req.Storage, roleName) + if err != nil { + return nil, err + } + if role == nil { + return nil, fmt.Errorf("entry for role %s not found", parsedARN.RoleName) + } + + if !cidrutil.RemoteAddrIsOk(req.Connection.RemoteAddr, role.BoundCIDRs) { + return nil, errors.New("login request originated from invalid CIDR") + } + if !parsedARN.IsMemberOf(role.ARN) { + return nil, errors.New("the caller's arn does not match the role's arn") + } + return &logical.Response{ + Auth: &logical.Auth{ + Period: role.Period, + Policies: role.Policies, + Metadata: map[string]string{ + "account_id": callerIdentity.AccountId, + "user_id": callerIdentity.UserId, + "role_id": callerIdentity.RoleId, + "arn": callerIdentity.Arn, + "identity_type": callerIdentity.IdentityType, + "principal_id": callerIdentity.PrincipalId, + "request_id": callerIdentity.RequestId, + "role_name": parsedARN.RoleName, + }, + DisplayName: callerIdentity.PrincipalId, + LeaseOptions: logical.LeaseOptions{ + Renewable: true, + TTL: role.TTL, + MaxTTL: role.MaxTTL, + }, + Alias: &logical.Alias{ + Name: callerIdentity.PrincipalId, + }, + BoundCIDRs: role.BoundCIDRs, + }, + }, nil +} + +func (b *backend) pathLoginRenew(ctx context.Context, req *logical.Request, data *framework.FieldData) (*logical.Response, error) { + // The arn set in metadata earlier is the assumed-role arn. + arn := req.Auth.Metadata["arn"] + if arn == "" { + return nil, errors.New("unable to retrieve arn from metadata during renewal") + } + parsedARN, err := parseARN(arn) + if err != nil { + return nil, err + } + + roleName, ok := req.Auth.Metadata["role_name"] + if !ok { + return nil, errors.New("error retrieving role_name during renewal") + } + + role, err := readRole(ctx, req.Storage, roleName) + if err != nil { + return nil, err + } + if role == nil { + return nil, errors.New("role entry not found") + } + + if !parsedARN.IsMemberOf(role.ARN) { + return nil, errors.New("the caller's arn does not match the role's arn") + } + + resp := &logical.Response{Auth: req.Auth} + resp.Auth.TTL = role.TTL + resp.Auth.MaxTTL = role.MaxTTL + resp.Auth.Period = role.Period + return resp, nil +} + +func (b *backend) getCallerIdentity(header http.Header, rawURL string) (*sts.GetCallerIdentityResponse, error) { + /* + Here we need to ensure we're actually hitting the AliCloud service, and that the caller didn't + inject a URL to their own service that will respond as desired. 
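+		The checks below therefore require an https scheme, the expected STS host, and a
+		Format=JSON GetCallerIdentity action before the captured request is re-sent.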
+ */ + u, err := url.Parse(rawURL) + if err != nil { + return nil, err + } + if u.Scheme != "https" { + return nil, fmt.Errorf(`expected "https" url scheme but received "%s"`, u.Scheme) + } + stsEndpoint, err := getSTSEndpoint() + if err != nil { + return nil, err + } + if u.Host != stsEndpoint { + return nil, fmt.Errorf(`expected host of "sts.aliyuncs.com" but received "%s"`, u.Host) + } + q := u.Query() + if q.Get("Format") != "JSON" { + return nil, fmt.Errorf("query Format must be JSON but received %s", q.Get("Format")) + } + if q.Get("Action") != "GetCallerIdentity" { + return nil, fmt.Errorf("query Action must be GetCallerIdentity but received %s", q.Get("Action")) + } + + request, err := http.NewRequest(http.MethodPost, rawURL, nil) + if err != nil { + return nil, err + } + request.Header = header + + response, err := b.identityClient.Do(request) + if err != nil { + return nil, errwrap.Wrapf("error making request: {{err}}", err) + } + defer response.Body.Close() + + if response.StatusCode != 200 { + b, err := ioutil.ReadAll(response.Body) + if err != nil { + return nil, errwrap.Wrapf("error reading response body: {{err}}", err) + } + return nil, fmt.Errorf("received %d checking caller identity: %s", response.StatusCode, b) + } + + result := &sts.GetCallerIdentityResponse{} + if err := json.NewDecoder(response.Body).Decode(result); err != nil { + return nil, errwrap.Wrapf("error decoding response: {{err}}", err) + } + return result, nil +} + +func getSTSEndpoint() (string, error) { + r := &endpoints.LocalGlobalResolver{} + endpoint, supported, err := r.TryResolve(&endpoints.ResolveParam{ + Product: "sts", + }) + if err != nil { + return "", err + } + if !supported { + return "", errors.New("sts endpoint is no longer supported") + } + if endpoint == "" { + return "", errors.New("got an empty endpoint") + } + return endpoint, nil +} + +const pathLoginSyn = ` +Authenticates an RAM entity with Vault. +` + +const pathLoginDesc = ` +Authenticate AliCloud entities using an arbitrary RAM principal. + +RAM principals are authenticated by processing a signed sts:GetCallerIdentity +request and then parsing the response to see who signed the request. +` diff --git a/vendor/github.com/hashicorp/vault-plugin-auth-alicloud/path_role.go b/vendor/github.com/hashicorp/vault-plugin-auth-alicloud/path_role.go new file mode 100644 index 0000000000..2de8aa4d90 --- /dev/null +++ b/vendor/github.com/hashicorp/vault-plugin-auth-alicloud/path_role.go @@ -0,0 +1,228 @@ +package alicloud + +import ( + "context" + "errors" + "fmt" + "time" + + "github.com/hashicorp/errwrap" + "github.com/hashicorp/vault/helper/parseutil" + "github.com/hashicorp/vault/logical" + "github.com/hashicorp/vault/logical/framework" +) + +func pathRole(b *backend) *framework.Path { + return &framework.Path{ + Pattern: "role/" + framework.GenericNameRegex("role"), + Fields: map[string]*framework.FieldSchema{ + "role": { + Type: framework.TypeLowerCaseString, + Description: "The name of the role as it should appear in Vault.", + }, + "arn": { + Type: framework.TypeString, + Description: "ARN of the RAM to bind to this role.", + }, + "policies": { + Type: framework.TypeCommaStringSlice, + Description: "Policies to be set on tokens issued using this role.", + }, + "ttl": { + Type: framework.TypeDurationSecond, + Description: `Duration in seconds after which the issued token should expire. 
Defaults +to 0, in which case the value will fallback to the system/mount defaults.`, + }, + "max_ttl": { + Type: framework.TypeDurationSecond, + Description: "The maximum allowed lifetime of tokens issued using this role.", + }, + "period": { + Type: framework.TypeDurationSecond, + Description: ` +If set, indicates that the token generated using this role should never expire. +The token should be renewed within the duration specified by this value. At +each renewal, the token's TTL will be set to the value of this parameter.`, + }, + "bound_cidrs": { + Type: framework.TypeCommaStringSlice, + Description: `Comma separated string or list of CIDR blocks. If set, specifies the blocks of +IP addresses which can perform the login operation.`, + }, + }, + ExistenceCheck: b.operationRoleExistenceCheck, + Callbacks: map[logical.Operation]framework.OperationFunc{ + logical.CreateOperation: b.operationRoleCreateUpdate, + logical.UpdateOperation: b.operationRoleCreateUpdate, + logical.ReadOperation: b.operationRoleRead, + logical.DeleteOperation: b.operationRoleDelete, + }, + HelpSynopsis: pathRoleSyn, + HelpDescription: pathRoleDesc, + } +} + +func pathListRole(b *backend) *framework.Path { + return &framework.Path{ + Pattern: "role/?", + Callbacks: map[logical.Operation]framework.OperationFunc{ + logical.ListOperation: b.operationRoleList, + }, + HelpSynopsis: pathListRolesHelpSyn, + HelpDescription: pathListRolesHelpDesc, + } +} + +func pathListRoles(b *backend) *framework.Path { + return &framework.Path{ + Pattern: "roles/?", + Callbacks: map[logical.Operation]framework.OperationFunc{ + logical.ListOperation: b.operationRoleList, + }, + HelpSynopsis: pathListRolesHelpSyn, + HelpDescription: pathListRolesHelpDesc, + } +} + +// Establishes dichotomy of request operation between CreateOperation and UpdateOperation. +// Returning 'true' forces an UpdateOperation, CreateOperation otherwise. +func (b *backend) operationRoleExistenceCheck(ctx context.Context, req *logical.Request, data *framework.FieldData) (bool, error) { + entry, err := readRole(ctx, req.Storage, data.Get("role").(string)) + if err != nil { + return false, err + } + return entry != nil, nil +} + +func (b *backend) operationRoleCreateUpdate(ctx context.Context, req *logical.Request, data *framework.FieldData) (*logical.Response, error) { + + roleName := data.Get("role").(string) + + role, err := readRole(ctx, req.Storage, roleName) + if err != nil { + return nil, err + } + if role == nil && req.Operation == logical.UpdateOperation { + return nil, fmt.Errorf("no role found to update for %s", roleName) + } else if role == nil { + role = &roleEntry{} + } + + if raw, ok := data.GetOk("arn"); ok { + arn, err := parseARN(raw.(string)) + if err != nil { + return nil, errwrap.Wrapf(fmt.Sprintf("unable to parse arn %s: {{err}}", arn), err) + } + if arn.Type != arnTypeRole { + return nil, fmt.Errorf("only role arn types are supported at this time, but %s was provided", role.ARN) + } + role.ARN = arn + } else if req.Operation == logical.CreateOperation { + return nil, errors.New("the arn is required to create a role") + } + + // None of the remaining fields are required. 
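+	// Note that ttl, max_ttl, and period arrive from the framework as integer
+	// seconds and are stored as time.Duration values.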
+ if raw, ok := data.GetOk("policies"); ok { + role.Policies = raw.([]string) + } + if raw, ok := data.GetOk("ttl"); ok { + role.TTL = time.Duration(raw.(int)) * time.Second + } + if raw, ok := data.GetOk("max_ttl"); ok { + role.MaxTTL = time.Duration(raw.(int)) * time.Second + } + if raw, ok := data.GetOk("period"); ok { + role.Period = time.Duration(raw.(int)) * time.Second + } + boundCIDRs, err := parseutil.ParseAddrs(data.Get("bound_cidrs")) + if err != nil { + return nil, err + } + role.BoundCIDRs = boundCIDRs + + if role.MaxTTL > 0 && role.TTL > role.MaxTTL { + return nil, errors.New("ttl exceeds max_ttl") + } + + entry, err := logical.StorageEntryJSON("role/"+roleName, role) + if err != nil { + return nil, err + } + if err := req.Storage.Put(ctx, entry); err != nil { + return nil, err + } + + if role.TTL > b.System().MaxLeaseTTL() { + resp := &logical.Response{} + resp.AddWarning(fmt.Sprintf("ttl of %d exceeds the system max ttl of %d, the latter will be used during login", role.TTL, b.System().MaxLeaseTTL())) + return resp, nil + } + return nil, nil +} + +func (b *backend) operationRoleRead(ctx context.Context, req *logical.Request, data *framework.FieldData) (*logical.Response, error) { + role, err := readRole(ctx, req.Storage, data.Get("role").(string)) + if err != nil { + return nil, err + } + if role == nil { + return nil, nil + } + return &logical.Response{ + Data: role.ToResponseData(), + }, nil +} + +func (b *backend) operationRoleDelete(ctx context.Context, req *logical.Request, data *framework.FieldData) (*logical.Response, error) { + if err := req.Storage.Delete(ctx, "role/"+data.Get("role").(string)); err != nil { + return nil, err + } + return nil, nil +} + +func (b *backend) operationRoleList(ctx context.Context, req *logical.Request, data *framework.FieldData) (*logical.Response, error) { + roleNames, err := req.Storage.List(ctx, "role/") + if err != nil { + return nil, err + } + return logical.ListResponse(roleNames), nil +} + +func readRole(ctx context.Context, s logical.Storage, roleName string) (*roleEntry, error) { + role, err := s.Get(ctx, "role/"+roleName) + if err != nil { + return nil, err + } + if role == nil { + return nil, nil + } + result := &roleEntry{} + if err := role.DecodeJSON(result); err != nil { + return nil, err + } + return result, nil +} + +const pathRoleSyn = ` +Create a role and associate policies to it. +` + +const pathRoleDesc = ` +A precondition for login is that a role should be created in the backend. +The login endpoint takes in the role name against which the instance +should be validated. After authenticating the instance, the authorization +for the instance to access Vault's resources is determined by the policies +that are associated to the role though this endpoint. + +Also, a 'max_ttl' can be configured in this endpoint that determines the maximum +duration for which a login can be renewed. Note that the 'max_ttl' has an upper +limit of the 'max_ttl' value on the backend's mount. The same applies to the 'ttl'. +` + +const pathListRolesHelpSyn = ` +Lists all the roles that are registered with Vault. +` + +const pathListRolesHelpDesc = ` +Roles will be listed by their respective role names. 
+` diff --git a/vendor/github.com/hashicorp/vault-plugin-auth-alicloud/role_entry.go b/vendor/github.com/hashicorp/vault-plugin-auth-alicloud/role_entry.go new file mode 100644 index 0000000000..a9240ace72 --- /dev/null +++ b/vendor/github.com/hashicorp/vault-plugin-auth-alicloud/role_entry.go @@ -0,0 +1,31 @@ +package alicloud + +import ( + "time" + + "github.com/hashicorp/go-sockaddr" +) + +type roleEntry struct { + ARN *arn `json:"arn"` + Policies []string `json:"policies"` + TTL time.Duration `json:"ttl"` + MaxTTL time.Duration `json:"max_ttl"` + Period time.Duration `json:"period"` + BoundCIDRs []*sockaddr.SockAddrMarshaler `json:"bound_cidrs"` +} + +func (r *roleEntry) ToResponseData() map[string]interface{} { + cidrs := make([]string, len(r.BoundCIDRs)) + for i, cidr := range r.BoundCIDRs { + cidrs[i] = cidr.String() + } + return map[string]interface{}{ + "arn": r.ARN.String(), + "policies": r.Policies, + "ttl": r.TTL / time.Second, + "max_ttl": r.MaxTTL / time.Second, + "period": r.Period / time.Second, + "bound_cidrs": cidrs, + } +} diff --git a/vendor/github.com/hashicorp/vault-plugin-auth-alicloud/tools/tools.go b/vendor/github.com/hashicorp/vault-plugin-auth-alicloud/tools/tools.go new file mode 100644 index 0000000000..552932ad42 --- /dev/null +++ b/vendor/github.com/hashicorp/vault-plugin-auth-alicloud/tools/tools.go @@ -0,0 +1,79 @@ +package tools + +import ( + "encoding/base64" + "encoding/json" + "errors" + "net/http" + "net/url" + + "github.com/aliyun/alibaba-cloud-sdk-go/sdk" + "github.com/aliyun/alibaba-cloud-sdk-go/sdk/auth/credentials" + "github.com/aliyun/alibaba-cloud-sdk-go/services/sts" +) + +// Generates the necessary data to send to the Vault server for generating a token +// This is useful for other API clients to use +func GenerateLoginData(accessKeyID, accessKeySecret, securityToken, region string) (map[string]interface{}, error) { + creds := credentials.NewStsTokenCredential(accessKeyID, accessKeySecret, securityToken) + + config := sdk.NewConfig() + + // This call always must be https but the config doesn't default to that. + config.Scheme = "https" + + // Prepare to record the request using a proxy that will capture it and throw an error so it's not executed. + capturer := &RequestCapturer{} + transport := &http.Transport{} + transport.Proxy = capturer.Proxy + config.HttpTransport = transport + + client, err := sts.NewClientWithOptions(region, config, creds) + if err != nil { + return nil, err + } + + // This method returns a response and an error. We're ignoring both because the response + // will always be nil here, and the error will always be the error thrown by the Proxy + // method below. We don't care about either of them, we just care about firing the request + // so we can capture it on the way out and retrieve it for further use. + client.GetCallerIdentity(sts.CreateGetCallerIdentityRequest()) + + getCallerIdentityRequest, err := capturer.GetCapturedRequest() + if err != nil { + return nil, err + } + + u := base64.StdEncoding.EncodeToString([]byte(getCallerIdentityRequest.URL.String())) + b, err := json.Marshal(getCallerIdentityRequest.Header) + if err != nil { + return nil, err + } + headers := base64.StdEncoding.EncodeToString(b) + return map[string]interface{}{ + "identity_request_url": u, + "identity_request_headers": headers, + }, nil +} + +/* +RequestCapturer fulfills the Proxy method of http.Transport, so can be used to replace +the Proxy method on any transport method to simply capture the request. 
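+It is how GenerateLoginData obtains the signed GetCallerIdentity request without
+sending any network traffic.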
+Its Proxy method always returns an error so the request won't actually be fired. +This is useful for quickly finding out what final request a client is sending. +*/ +type RequestCapturer struct { + request *http.Request +} + +func (r *RequestCapturer) Proxy(req *http.Request) (*url.URL, error) { + r.request = req + return nil, errors.New("throwing an error so we won't actually execute the request") +} + +func (r *RequestCapturer) GetCapturedRequest() (*http.Request, error) { + if r.request == nil { + return nil, errors.New("no request captured") + } + return r.request, nil +} diff --git a/vendor/github.com/json-iterator/go/Gopkg.lock b/vendor/github.com/json-iterator/go/Gopkg.lock new file mode 100644 index 0000000000..c8a9fbb387 --- /dev/null +++ b/vendor/github.com/json-iterator/go/Gopkg.lock @@ -0,0 +1,21 @@ +# This file is autogenerated, do not edit; changes may be undone by the next 'dep ensure'. + + +[[projects]] + name = "github.com/modern-go/concurrent" + packages = ["."] + revision = "e0a39a4cb4216ea8db28e22a69f4ec25610d513a" + version = "1.0.0" + +[[projects]] + name = "github.com/modern-go/reflect2" + packages = ["."] + revision = "4b7aa43c6742a2c18fdef89dd197aaae7dac7ccd" + version = "1.0.1" + +[solve-meta] + analyzer-name = "dep" + analyzer-version = 1 + inputs-digest = "ea54a775e5a354cb015502d2e7aa4b74230fc77e894f34a838b268c25ec8eeb8" + solver-name = "gps-cdcl" + solver-version = 1 diff --git a/vendor/github.com/json-iterator/go/Gopkg.toml b/vendor/github.com/json-iterator/go/Gopkg.toml new file mode 100644 index 0000000000..313a0f887b --- /dev/null +++ b/vendor/github.com/json-iterator/go/Gopkg.toml @@ -0,0 +1,26 @@ +# Gopkg.toml example +# +# Refer to https://github.com/golang/dep/blob/master/docs/Gopkg.toml.md +# for detailed Gopkg.toml documentation. +# +# required = ["github.com/user/thing/cmd/thing"] +# ignored = ["github.com/user/project/pkgX", "bitbucket.org/user/project/pkgA/pkgY"] +# +# [[constraint]] +# name = "github.com/user/project" +# version = "1.0.0" +# +# [[constraint]] +# name = "github.com/user/project2" +# branch = "dev" +# source = "github.com/myfork/project2" +# +# [[override]] +# name = "github.com/x/y" +# version = "2.4.0" + +ignored = ["github.com/davecgh/go-spew*","github.com/google/gofuzz*","github.com/stretchr/testify*"] + +[[constraint]] + name = "github.com/modern-go/reflect2" + version = "1.0.1" diff --git a/vendor/github.com/json-iterator/go/LICENSE b/vendor/github.com/json-iterator/go/LICENSE new file mode 100644 index 0000000000..2cf4f5ab28 --- /dev/null +++ b/vendor/github.com/json-iterator/go/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2016 json-iterator + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/vendor/github.com/json-iterator/go/README.md b/vendor/github.com/json-iterator/go/README.md new file mode 100644 index 0000000000..54d5afe957 --- /dev/null +++ b/vendor/github.com/json-iterator/go/README.md @@ -0,0 +1,91 @@ +[![Sourcegraph](https://sourcegraph.com/github.com/json-iterator/go/-/badge.svg)](https://sourcegraph.com/github.com/json-iterator/go?badge) +[![GoDoc](http://img.shields.io/badge/go-documentation-blue.svg?style=flat-square)](http://godoc.org/github.com/json-iterator/go) +[![Build Status](https://travis-ci.org/json-iterator/go.svg?branch=master)](https://travis-ci.org/json-iterator/go) +[![codecov](https://codecov.io/gh/json-iterator/go/branch/master/graph/badge.svg)](https://codecov.io/gh/json-iterator/go) +[![rcard](https://goreportcard.com/badge/github.com/json-iterator/go)](https://goreportcard.com/report/github.com/json-iterator/go) +[![License](http://img.shields.io/badge/license-mit-blue.svg?style=flat-square)](https://raw.githubusercontent.com/json-iterator/go/master/LICENSE) +[![Gitter chat](https://badges.gitter.im/gitterHQ/gitter.png)](https://gitter.im/json-iterator/Lobby) + +A high-performance 100% compatible drop-in replacement of "encoding/json" + +You can also use thrift like JSON using [thrift-iterator](https://github.com/thrift-iterator/go) + +``` +Go开发者们请加入我们,滴滴出行平台技术部 taowen@didichuxing.com +``` + +# Benchmark + +![benchmark](http://jsoniter.com/benchmarks/go-benchmark.png) + +Source code: https://github.com/json-iterator/go-benchmark/blob/master/src/github.com/json-iterator/go-benchmark/benchmark_medium_payload_test.go + +Raw Result (easyjson requires static code generation) + +| | ns/op | allocation bytes | allocation times | +| --- | --- | --- | --- | +| std decode | 35510 ns/op | 1960 B/op | 99 allocs/op | +| easyjson decode | 8499 ns/op | 160 B/op | 4 allocs/op | +| jsoniter decode | 5623 ns/op | 160 B/op | 3 allocs/op | +| std encode | 2213 ns/op | 712 B/op | 5 allocs/op | +| easyjson encode | 883 ns/op | 576 B/op | 3 allocs/op | +| jsoniter encode | 837 ns/op | 384 B/op | 4 allocs/op | + +Always benchmark with your own workload. +The result depends heavily on the data input. + +# Usage + +100% compatibility with standard lib + +Replace + +```go +import "encoding/json" +json.Marshal(&data) +``` + +with + +```go +import "github.com/json-iterator/go" + +var json = jsoniter.ConfigCompatibleWithStandardLibrary +json.Marshal(&data) +``` + +Replace + +```go +import "encoding/json" +json.Unmarshal(input, &data) +``` + +with + +```go +import "github.com/json-iterator/go" + +var json = jsoniter.ConfigCompatibleWithStandardLibrary +json.Unmarshal(input, &data) +``` + +[More documentation](http://jsoniter.com/migrate-from-go-std.html) + +# How to get + +``` +go get github.com/json-iterator/go +``` + +# Contribution Welcomed ! 
+ +Contributors + +* [thockin](https://github.com/thockin) +* [mattn](https://github.com/mattn) +* [cch123](https://github.com/cch123) +* [Oleg Shaldybin](https://github.com/olegshaldybin) +* [Jason Toffaletti](https://github.com/toffaletti) + +Report issue or pull request, or email taowen@gmail.com, or [![Gitter chat](https://badges.gitter.im/gitterHQ/gitter.png)](https://gitter.im/json-iterator/Lobby) diff --git a/vendor/github.com/json-iterator/go/adapter.go b/vendor/github.com/json-iterator/go/adapter.go new file mode 100644 index 0000000000..f371bfed76 --- /dev/null +++ b/vendor/github.com/json-iterator/go/adapter.go @@ -0,0 +1,148 @@ +package jsoniter + +import ( + "bytes" + "io" +) + +// RawMessage to make replace json with jsoniter +type RawMessage []byte + +// Unmarshal adapts to json/encoding Unmarshal API +// +// Unmarshal parses the JSON-encoded data and stores the result in the value pointed to by v. +// Refer to https://godoc.org/encoding/json#Unmarshal for more information +func Unmarshal(data []byte, v interface{}) error { + return ConfigDefault.Unmarshal(data, v) +} + +// UnmarshalFromString convenient method to read from string instead of []byte +func UnmarshalFromString(str string, v interface{}) error { + return ConfigDefault.UnmarshalFromString(str, v) +} + +// Get quick method to get value from deeply nested JSON structure +func Get(data []byte, path ...interface{}) Any { + return ConfigDefault.Get(data, path...) +} + +// Marshal adapts to json/encoding Marshal API +// +// Marshal returns the JSON encoding of v, adapts to json/encoding Marshal API +// Refer to https://godoc.org/encoding/json#Marshal for more information +func Marshal(v interface{}) ([]byte, error) { + return ConfigDefault.Marshal(v) +} + +// MarshalIndent same as json.MarshalIndent. Prefix is not supported. +func MarshalIndent(v interface{}, prefix, indent string) ([]byte, error) { + return ConfigDefault.MarshalIndent(v, prefix, indent) +} + +// MarshalToString convenient method to write as string instead of []byte +func MarshalToString(v interface{}) (string, error) { + return ConfigDefault.MarshalToString(v) +} + +// NewDecoder adapts to json/stream NewDecoder API. +// +// NewDecoder returns a new decoder that reads from r. +// +// Instead of a json/encoding Decoder, an Decoder is returned +// Refer to https://godoc.org/encoding/json#NewDecoder for more information +func NewDecoder(reader io.Reader) *Decoder { + return ConfigDefault.NewDecoder(reader) +} + +// Decoder reads and decodes JSON values from an input stream. +// Decoder provides identical APIs with json/stream Decoder (Token() and UseNumber() are in progress) +type Decoder struct { + iter *Iterator +} + +// Decode decode JSON into interface{} +func (adapter *Decoder) Decode(obj interface{}) error { + if adapter.iter.head == adapter.iter.tail && adapter.iter.reader != nil { + if !adapter.iter.loadMore() { + return io.EOF + } + } + adapter.iter.ReadVal(obj) + err := adapter.iter.Error + if err == io.EOF { + return nil + } + return adapter.iter.Error +} + +// More is there more? 
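+// It reports whether another value is available in the buffer or can be loaded
+// from the underlying reader.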
+func (adapter *Decoder) More() bool { + iter := adapter.iter + if iter.Error != nil { + return false + } + if iter.head != iter.tail { + return true + } + return iter.loadMore() +} + +// Buffered remaining buffer +func (adapter *Decoder) Buffered() io.Reader { + remaining := adapter.iter.buf[adapter.iter.head:adapter.iter.tail] + return bytes.NewReader(remaining) +} + +// UseNumber causes the Decoder to unmarshal a number into an interface{} as a +// Number instead of as a float64. +func (adapter *Decoder) UseNumber() { + cfg := adapter.iter.cfg.configBeforeFrozen + cfg.UseNumber = true + adapter.iter.cfg = cfg.frozeWithCacheReuse() +} + +// DisallowUnknownFields causes the Decoder to return an error when the destination +// is a struct and the input contains object keys which do not match any +// non-ignored, exported fields in the destination. +func (adapter *Decoder) DisallowUnknownFields() { + cfg := adapter.iter.cfg.configBeforeFrozen + cfg.DisallowUnknownFields = true + adapter.iter.cfg = cfg.frozeWithCacheReuse() +} + +// NewEncoder same as json.NewEncoder +func NewEncoder(writer io.Writer) *Encoder { + return ConfigDefault.NewEncoder(writer) +} + +// Encoder same as json.Encoder +type Encoder struct { + stream *Stream +} + +// Encode encode interface{} as JSON to io.Writer +func (adapter *Encoder) Encode(val interface{}) error { + adapter.stream.WriteVal(val) + adapter.stream.WriteRaw("\n") + adapter.stream.Flush() + return adapter.stream.Error +} + +// SetIndent set the indention. Prefix is not supported +func (adapter *Encoder) SetIndent(prefix, indent string) { + config := adapter.stream.cfg.configBeforeFrozen + config.IndentionStep = len(indent) + adapter.stream.cfg = config.frozeWithCacheReuse() +} + +// SetEscapeHTML escape html by default, set to false to disable +func (adapter *Encoder) SetEscapeHTML(escapeHTML bool) { + config := adapter.stream.cfg.configBeforeFrozen + config.EscapeHTML = escapeHTML + adapter.stream.cfg = config.frozeWithCacheReuse() +} + +// Valid reports whether data is a valid JSON encoding. +func Valid(data []byte) bool { + return ConfigDefault.Valid(data) +} diff --git a/vendor/github.com/json-iterator/go/any.go b/vendor/github.com/json-iterator/go/any.go new file mode 100644 index 0000000000..daecfed615 --- /dev/null +++ b/vendor/github.com/json-iterator/go/any.go @@ -0,0 +1,321 @@ +package jsoniter + +import ( + "errors" + "fmt" + "github.com/modern-go/reflect2" + "io" + "reflect" + "strconv" + "unsafe" +) + +// Any generic object representation. +// The lazy json implementation holds []byte and parse lazily. 
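+// For example, Get(data, "user", "name").ToString() extracts a nested field
+// without decoding the surrounding document.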
+type Any interface { + LastError() error + ValueType() ValueType + MustBeValid() Any + ToBool() bool + ToInt() int + ToInt32() int32 + ToInt64() int64 + ToUint() uint + ToUint32() uint32 + ToUint64() uint64 + ToFloat32() float32 + ToFloat64() float64 + ToString() string + ToVal(val interface{}) + Get(path ...interface{}) Any + Size() int + Keys() []string + GetInterface() interface{} + WriteTo(stream *Stream) +} + +type baseAny struct{} + +func (any *baseAny) Get(path ...interface{}) Any { + return &invalidAny{baseAny{}, fmt.Errorf("GetIndex %v from simple value", path)} +} + +func (any *baseAny) Size() int { + return 0 +} + +func (any *baseAny) Keys() []string { + return []string{} +} + +func (any *baseAny) ToVal(obj interface{}) { + panic("not implemented") +} + +// WrapInt32 turn int32 into Any interface +func WrapInt32(val int32) Any { + return &int32Any{baseAny{}, val} +} + +// WrapInt64 turn int64 into Any interface +func WrapInt64(val int64) Any { + return &int64Any{baseAny{}, val} +} + +// WrapUint32 turn uint32 into Any interface +func WrapUint32(val uint32) Any { + return &uint32Any{baseAny{}, val} +} + +// WrapUint64 turn uint64 into Any interface +func WrapUint64(val uint64) Any { + return &uint64Any{baseAny{}, val} +} + +// WrapFloat64 turn float64 into Any interface +func WrapFloat64(val float64) Any { + return &floatAny{baseAny{}, val} +} + +// WrapString turn string into Any interface +func WrapString(val string) Any { + return &stringAny{baseAny{}, val} +} + +// Wrap turn a go object into Any interface +func Wrap(val interface{}) Any { + if val == nil { + return &nilAny{} + } + asAny, isAny := val.(Any) + if isAny { + return asAny + } + typ := reflect2.TypeOf(val) + switch typ.Kind() { + case reflect.Slice: + return wrapArray(val) + case reflect.Struct: + return wrapStruct(val) + case reflect.Map: + return wrapMap(val) + case reflect.String: + return WrapString(val.(string)) + case reflect.Int: + if strconv.IntSize == 32 { + return WrapInt32(int32(val.(int))) + } + return WrapInt64(int64(val.(int))) + case reflect.Int8: + return WrapInt32(int32(val.(int8))) + case reflect.Int16: + return WrapInt32(int32(val.(int16))) + case reflect.Int32: + return WrapInt32(val.(int32)) + case reflect.Int64: + return WrapInt64(val.(int64)) + case reflect.Uint: + if strconv.IntSize == 32 { + return WrapUint32(uint32(val.(uint))) + } + return WrapUint64(uint64(val.(uint))) + case reflect.Uintptr: + if ptrSize == 32 { + return WrapUint32(uint32(val.(uintptr))) + } + return WrapUint64(uint64(val.(uintptr))) + case reflect.Uint8: + return WrapUint32(uint32(val.(uint8))) + case reflect.Uint16: + return WrapUint32(uint32(val.(uint16))) + case reflect.Uint32: + return WrapUint32(uint32(val.(uint32))) + case reflect.Uint64: + return WrapUint64(val.(uint64)) + case reflect.Float32: + return WrapFloat64(float64(val.(float32))) + case reflect.Float64: + return WrapFloat64(val.(float64)) + case reflect.Bool: + if val.(bool) == true { + return &trueAny{} + } + return &falseAny{} + } + return &invalidAny{baseAny{}, fmt.Errorf("unsupported type: %v", typ)} +} + +// ReadAny read next JSON element as an Any object. It is a better json.RawMessage. 
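+// Unlike json.RawMessage, the result can still be queried in place (for example
+// via Get) without an explicit second Unmarshal.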
+func (iter *Iterator) ReadAny() Any { + return iter.readAny() +} + +func (iter *Iterator) readAny() Any { + c := iter.nextToken() + switch c { + case '"': + iter.unreadByte() + return &stringAny{baseAny{}, iter.ReadString()} + case 'n': + iter.skipThreeBytes('u', 'l', 'l') // null + return &nilAny{} + case 't': + iter.skipThreeBytes('r', 'u', 'e') // true + return &trueAny{} + case 'f': + iter.skipFourBytes('a', 'l', 's', 'e') // false + return &falseAny{} + case '{': + return iter.readObjectAny() + case '[': + return iter.readArrayAny() + case '-': + return iter.readNumberAny(false) + case 0: + return &invalidAny{baseAny{}, errors.New("input is empty")} + default: + return iter.readNumberAny(true) + } +} + +func (iter *Iterator) readNumberAny(positive bool) Any { + iter.startCapture(iter.head - 1) + iter.skipNumber() + lazyBuf := iter.stopCapture() + return &numberLazyAny{baseAny{}, iter.cfg, lazyBuf, nil} +} + +func (iter *Iterator) readObjectAny() Any { + iter.startCapture(iter.head - 1) + iter.skipObject() + lazyBuf := iter.stopCapture() + return &objectLazyAny{baseAny{}, iter.cfg, lazyBuf, nil} +} + +func (iter *Iterator) readArrayAny() Any { + iter.startCapture(iter.head - 1) + iter.skipArray() + lazyBuf := iter.stopCapture() + return &arrayLazyAny{baseAny{}, iter.cfg, lazyBuf, nil} +} + +func locateObjectField(iter *Iterator, target string) []byte { + var found []byte + iter.ReadObjectCB(func(iter *Iterator, field string) bool { + if field == target { + found = iter.SkipAndReturnBytes() + return false + } + iter.Skip() + return true + }) + return found +} + +func locateArrayElement(iter *Iterator, target int) []byte { + var found []byte + n := 0 + iter.ReadArrayCB(func(iter *Iterator) bool { + if n == target { + found = iter.SkipAndReturnBytes() + return false + } + iter.Skip() + n++ + return true + }) + return found +} + +func locatePath(iter *Iterator, path []interface{}) Any { + for i, pathKeyObj := range path { + switch pathKey := pathKeyObj.(type) { + case string: + valueBytes := locateObjectField(iter, pathKey) + if valueBytes == nil { + return newInvalidAny(path[i:]) + } + iter.ResetBytes(valueBytes) + case int: + valueBytes := locateArrayElement(iter, pathKey) + if valueBytes == nil { + return newInvalidAny(path[i:]) + } + iter.ResetBytes(valueBytes) + case int32: + if '*' == pathKey { + return iter.readAny().Get(path[i:]...) 
+ } + return newInvalidAny(path[i:]) + default: + return newInvalidAny(path[i:]) + } + } + if iter.Error != nil && iter.Error != io.EOF { + return &invalidAny{baseAny{}, iter.Error} + } + return iter.readAny() +} + +var anyType = reflect2.TypeOfPtr((*Any)(nil)).Elem() + +func createDecoderOfAny(ctx *ctx, typ reflect2.Type) ValDecoder { + if typ == anyType { + return &directAnyCodec{} + } + if typ.Implements(anyType) { + return &anyCodec{ + valType: typ, + } + } + return nil +} + +func createEncoderOfAny(ctx *ctx, typ reflect2.Type) ValEncoder { + if typ == anyType { + return &directAnyCodec{} + } + if typ.Implements(anyType) { + return &anyCodec{ + valType: typ, + } + } + return nil +} + +type anyCodec struct { + valType reflect2.Type +} + +func (codec *anyCodec) Decode(ptr unsafe.Pointer, iter *Iterator) { + panic("not implemented") +} + +func (codec *anyCodec) Encode(ptr unsafe.Pointer, stream *Stream) { + obj := codec.valType.UnsafeIndirect(ptr) + any := obj.(Any) + any.WriteTo(stream) +} + +func (codec *anyCodec) IsEmpty(ptr unsafe.Pointer) bool { + obj := codec.valType.UnsafeIndirect(ptr) + any := obj.(Any) + return any.Size() == 0 +} + +type directAnyCodec struct { +} + +func (codec *directAnyCodec) Decode(ptr unsafe.Pointer, iter *Iterator) { + *(*Any)(ptr) = iter.readAny() +} + +func (codec *directAnyCodec) Encode(ptr unsafe.Pointer, stream *Stream) { + any := *(*Any)(ptr) + any.WriteTo(stream) +} + +func (codec *directAnyCodec) IsEmpty(ptr unsafe.Pointer) bool { + any := *(*Any)(ptr) + return any.Size() == 0 +} diff --git a/vendor/github.com/json-iterator/go/any_array.go b/vendor/github.com/json-iterator/go/any_array.go new file mode 100644 index 0000000000..0449e9aa42 --- /dev/null +++ b/vendor/github.com/json-iterator/go/any_array.go @@ -0,0 +1,278 @@ +package jsoniter + +import ( + "reflect" + "unsafe" +) + +type arrayLazyAny struct { + baseAny + cfg *frozenConfig + buf []byte + err error +} + +func (any *arrayLazyAny) ValueType() ValueType { + return ArrayValue +} + +func (any *arrayLazyAny) MustBeValid() Any { + return any +} + +func (any *arrayLazyAny) LastError() error { + return any.err +} + +func (any *arrayLazyAny) ToBool() bool { + iter := any.cfg.BorrowIterator(any.buf) + defer any.cfg.ReturnIterator(iter) + return iter.ReadArray() +} + +func (any *arrayLazyAny) ToInt() int { + if any.ToBool() { + return 1 + } + return 0 +} + +func (any *arrayLazyAny) ToInt32() int32 { + if any.ToBool() { + return 1 + } + return 0 +} + +func (any *arrayLazyAny) ToInt64() int64 { + if any.ToBool() { + return 1 + } + return 0 +} + +func (any *arrayLazyAny) ToUint() uint { + if any.ToBool() { + return 1 + } + return 0 +} + +func (any *arrayLazyAny) ToUint32() uint32 { + if any.ToBool() { + return 1 + } + return 0 +} + +func (any *arrayLazyAny) ToUint64() uint64 { + if any.ToBool() { + return 1 + } + return 0 +} + +func (any *arrayLazyAny) ToFloat32() float32 { + if any.ToBool() { + return 1 + } + return 0 +} + +func (any *arrayLazyAny) ToFloat64() float64 { + if any.ToBool() { + return 1 + } + return 0 +} + +func (any *arrayLazyAny) ToString() string { + return *(*string)(unsafe.Pointer(&any.buf)) +} + +func (any *arrayLazyAny) ToVal(val interface{}) { + iter := any.cfg.BorrowIterator(any.buf) + defer any.cfg.ReturnIterator(iter) + iter.ReadVal(val) +} + +func (any *arrayLazyAny) Get(path ...interface{}) Any { + if len(path) == 0 { + return any + } + switch firstPath := path[0].(type) { + case int: + iter := any.cfg.BorrowIterator(any.buf) + defer any.cfg.ReturnIterator(iter) + valueBytes 
:= locateArrayElement(iter, firstPath) + if valueBytes == nil { + return newInvalidAny(path) + } + iter.ResetBytes(valueBytes) + return locatePath(iter, path[1:]) + case int32: + if '*' == firstPath { + iter := any.cfg.BorrowIterator(any.buf) + defer any.cfg.ReturnIterator(iter) + arr := make([]Any, 0) + iter.ReadArrayCB(func(iter *Iterator) bool { + found := iter.readAny().Get(path[1:]...) + if found.ValueType() != InvalidValue { + arr = append(arr, found) + } + return true + }) + return wrapArray(arr) + } + return newInvalidAny(path) + default: + return newInvalidAny(path) + } +} + +func (any *arrayLazyAny) Size() int { + size := 0 + iter := any.cfg.BorrowIterator(any.buf) + defer any.cfg.ReturnIterator(iter) + iter.ReadArrayCB(func(iter *Iterator) bool { + size++ + iter.Skip() + return true + }) + return size +} + +func (any *arrayLazyAny) WriteTo(stream *Stream) { + stream.Write(any.buf) +} + +func (any *arrayLazyAny) GetInterface() interface{} { + iter := any.cfg.BorrowIterator(any.buf) + defer any.cfg.ReturnIterator(iter) + return iter.Read() +} + +type arrayAny struct { + baseAny + val reflect.Value +} + +func wrapArray(val interface{}) *arrayAny { + return &arrayAny{baseAny{}, reflect.ValueOf(val)} +} + +func (any *arrayAny) ValueType() ValueType { + return ArrayValue +} + +func (any *arrayAny) MustBeValid() Any { + return any +} + +func (any *arrayAny) LastError() error { + return nil +} + +func (any *arrayAny) ToBool() bool { + return any.val.Len() != 0 +} + +func (any *arrayAny) ToInt() int { + if any.val.Len() == 0 { + return 0 + } + return 1 +} + +func (any *arrayAny) ToInt32() int32 { + if any.val.Len() == 0 { + return 0 + } + return 1 +} + +func (any *arrayAny) ToInt64() int64 { + if any.val.Len() == 0 { + return 0 + } + return 1 +} + +func (any *arrayAny) ToUint() uint { + if any.val.Len() == 0 { + return 0 + } + return 1 +} + +func (any *arrayAny) ToUint32() uint32 { + if any.val.Len() == 0 { + return 0 + } + return 1 +} + +func (any *arrayAny) ToUint64() uint64 { + if any.val.Len() == 0 { + return 0 + } + return 1 +} + +func (any *arrayAny) ToFloat32() float32 { + if any.val.Len() == 0 { + return 0 + } + return 1 +} + +func (any *arrayAny) ToFloat64() float64 { + if any.val.Len() == 0 { + return 0 + } + return 1 +} + +func (any *arrayAny) ToString() string { + str, _ := MarshalToString(any.val.Interface()) + return str +} + +func (any *arrayAny) Get(path ...interface{}) Any { + if len(path) == 0 { + return any + } + switch firstPath := path[0].(type) { + case int: + if firstPath < 0 || firstPath >= any.val.Len() { + return newInvalidAny(path) + } + return Wrap(any.val.Index(firstPath).Interface()) + case int32: + if '*' == firstPath { + mappedAll := make([]Any, 0) + for i := 0; i < any.val.Len(); i++ { + mapped := Wrap(any.val.Index(i).Interface()).Get(path[1:]...) 
+ if mapped.ValueType() != InvalidValue { + mappedAll = append(mappedAll, mapped) + } + } + return wrapArray(mappedAll) + } + return newInvalidAny(path) + default: + return newInvalidAny(path) + } +} + +func (any *arrayAny) Size() int { + return any.val.Len() +} + +func (any *arrayAny) WriteTo(stream *Stream) { + stream.WriteVal(any.val) +} + +func (any *arrayAny) GetInterface() interface{} { + return any.val.Interface() +} diff --git a/vendor/github.com/json-iterator/go/any_bool.go b/vendor/github.com/json-iterator/go/any_bool.go new file mode 100644 index 0000000000..9452324af5 --- /dev/null +++ b/vendor/github.com/json-iterator/go/any_bool.go @@ -0,0 +1,137 @@ +package jsoniter + +type trueAny struct { + baseAny +} + +func (any *trueAny) LastError() error { + return nil +} + +func (any *trueAny) ToBool() bool { + return true +} + +func (any *trueAny) ToInt() int { + return 1 +} + +func (any *trueAny) ToInt32() int32 { + return 1 +} + +func (any *trueAny) ToInt64() int64 { + return 1 +} + +func (any *trueAny) ToUint() uint { + return 1 +} + +func (any *trueAny) ToUint32() uint32 { + return 1 +} + +func (any *trueAny) ToUint64() uint64 { + return 1 +} + +func (any *trueAny) ToFloat32() float32 { + return 1 +} + +func (any *trueAny) ToFloat64() float64 { + return 1 +} + +func (any *trueAny) ToString() string { + return "true" +} + +func (any *trueAny) WriteTo(stream *Stream) { + stream.WriteTrue() +} + +func (any *trueAny) Parse() *Iterator { + return nil +} + +func (any *trueAny) GetInterface() interface{} { + return true +} + +func (any *trueAny) ValueType() ValueType { + return BoolValue +} + +func (any *trueAny) MustBeValid() Any { + return any +} + +type falseAny struct { + baseAny +} + +func (any *falseAny) LastError() error { + return nil +} + +func (any *falseAny) ToBool() bool { + return false +} + +func (any *falseAny) ToInt() int { + return 0 +} + +func (any *falseAny) ToInt32() int32 { + return 0 +} + +func (any *falseAny) ToInt64() int64 { + return 0 +} + +func (any *falseAny) ToUint() uint { + return 0 +} + +func (any *falseAny) ToUint32() uint32 { + return 0 +} + +func (any *falseAny) ToUint64() uint64 { + return 0 +} + +func (any *falseAny) ToFloat32() float32 { + return 0 +} + +func (any *falseAny) ToFloat64() float64 { + return 0 +} + +func (any *falseAny) ToString() string { + return "false" +} + +func (any *falseAny) WriteTo(stream *Stream) { + stream.WriteFalse() +} + +func (any *falseAny) Parse() *Iterator { + return nil +} + +func (any *falseAny) GetInterface() interface{} { + return false +} + +func (any *falseAny) ValueType() ValueType { + return BoolValue +} + +func (any *falseAny) MustBeValid() Any { + return any +} diff --git a/vendor/github.com/json-iterator/go/any_float.go b/vendor/github.com/json-iterator/go/any_float.go new file mode 100644 index 0000000000..35fdb09497 --- /dev/null +++ b/vendor/github.com/json-iterator/go/any_float.go @@ -0,0 +1,83 @@ +package jsoniter + +import ( + "strconv" +) + +type floatAny struct { + baseAny + val float64 +} + +func (any *floatAny) Parse() *Iterator { + return nil +} + +func (any *floatAny) ValueType() ValueType { + return NumberValue +} + +func (any *floatAny) MustBeValid() Any { + return any +} + +func (any *floatAny) LastError() error { + return nil +} + +func (any *floatAny) ToBool() bool { + return any.ToFloat64() != 0 +} + +func (any *floatAny) ToInt() int { + return int(any.val) +} + +func (any *floatAny) ToInt32() int32 { + return int32(any.val) +} + +func (any *floatAny) ToInt64() int64 { + return 
int64(any.val) +} + +func (any *floatAny) ToUint() uint { + if any.val > 0 { + return uint(any.val) + } + return 0 +} + +func (any *floatAny) ToUint32() uint32 { + if any.val > 0 { + return uint32(any.val) + } + return 0 +} + +func (any *floatAny) ToUint64() uint64 { + if any.val > 0 { + return uint64(any.val) + } + return 0 +} + +func (any *floatAny) ToFloat32() float32 { + return float32(any.val) +} + +func (any *floatAny) ToFloat64() float64 { + return any.val +} + +func (any *floatAny) ToString() string { + return strconv.FormatFloat(any.val, 'E', -1, 64) +} + +func (any *floatAny) WriteTo(stream *Stream) { + stream.WriteFloat64(any.val) +} + +func (any *floatAny) GetInterface() interface{} { + return any.val +} diff --git a/vendor/github.com/json-iterator/go/any_int32.go b/vendor/github.com/json-iterator/go/any_int32.go new file mode 100644 index 0000000000..1b56f39915 --- /dev/null +++ b/vendor/github.com/json-iterator/go/any_int32.go @@ -0,0 +1,74 @@ +package jsoniter + +import ( + "strconv" +) + +type int32Any struct { + baseAny + val int32 +} + +func (any *int32Any) LastError() error { + return nil +} + +func (any *int32Any) ValueType() ValueType { + return NumberValue +} + +func (any *int32Any) MustBeValid() Any { + return any +} + +func (any *int32Any) ToBool() bool { + return any.val != 0 +} + +func (any *int32Any) ToInt() int { + return int(any.val) +} + +func (any *int32Any) ToInt32() int32 { + return any.val +} + +func (any *int32Any) ToInt64() int64 { + return int64(any.val) +} + +func (any *int32Any) ToUint() uint { + return uint(any.val) +} + +func (any *int32Any) ToUint32() uint32 { + return uint32(any.val) +} + +func (any *int32Any) ToUint64() uint64 { + return uint64(any.val) +} + +func (any *int32Any) ToFloat32() float32 { + return float32(any.val) +} + +func (any *int32Any) ToFloat64() float64 { + return float64(any.val) +} + +func (any *int32Any) ToString() string { + return strconv.FormatInt(int64(any.val), 10) +} + +func (any *int32Any) WriteTo(stream *Stream) { + stream.WriteInt32(any.val) +} + +func (any *int32Any) Parse() *Iterator { + return nil +} + +func (any *int32Any) GetInterface() interface{} { + return any.val +} diff --git a/vendor/github.com/json-iterator/go/any_int64.go b/vendor/github.com/json-iterator/go/any_int64.go new file mode 100644 index 0000000000..c440d72b6d --- /dev/null +++ b/vendor/github.com/json-iterator/go/any_int64.go @@ -0,0 +1,74 @@ +package jsoniter + +import ( + "strconv" +) + +type int64Any struct { + baseAny + val int64 +} + +func (any *int64Any) LastError() error { + return nil +} + +func (any *int64Any) ValueType() ValueType { + return NumberValue +} + +func (any *int64Any) MustBeValid() Any { + return any +} + +func (any *int64Any) ToBool() bool { + return any.val != 0 +} + +func (any *int64Any) ToInt() int { + return int(any.val) +} + +func (any *int64Any) ToInt32() int32 { + return int32(any.val) +} + +func (any *int64Any) ToInt64() int64 { + return any.val +} + +func (any *int64Any) ToUint() uint { + return uint(any.val) +} + +func (any *int64Any) ToUint32() uint32 { + return uint32(any.val) +} + +func (any *int64Any) ToUint64() uint64 { + return uint64(any.val) +} + +func (any *int64Any) ToFloat32() float32 { + return float32(any.val) +} + +func (any *int64Any) ToFloat64() float64 { + return float64(any.val) +} + +func (any *int64Any) ToString() string { + return strconv.FormatInt(any.val, 10) +} + +func (any *int64Any) WriteTo(stream *Stream) { + stream.WriteInt64(any.val) +} + +func (any *int64Any) Parse() *Iterator { 
+ return nil +} + +func (any *int64Any) GetInterface() interface{} { + return any.val +} diff --git a/vendor/github.com/json-iterator/go/any_invalid.go b/vendor/github.com/json-iterator/go/any_invalid.go new file mode 100644 index 0000000000..1d859eac32 --- /dev/null +++ b/vendor/github.com/json-iterator/go/any_invalid.go @@ -0,0 +1,82 @@ +package jsoniter + +import "fmt" + +type invalidAny struct { + baseAny + err error +} + +func newInvalidAny(path []interface{}) *invalidAny { + return &invalidAny{baseAny{}, fmt.Errorf("%v not found", path)} +} + +func (any *invalidAny) LastError() error { + return any.err +} + +func (any *invalidAny) ValueType() ValueType { + return InvalidValue +} + +func (any *invalidAny) MustBeValid() Any { + panic(any.err) +} + +func (any *invalidAny) ToBool() bool { + return false +} + +func (any *invalidAny) ToInt() int { + return 0 +} + +func (any *invalidAny) ToInt32() int32 { + return 0 +} + +func (any *invalidAny) ToInt64() int64 { + return 0 +} + +func (any *invalidAny) ToUint() uint { + return 0 +} + +func (any *invalidAny) ToUint32() uint32 { + return 0 +} + +func (any *invalidAny) ToUint64() uint64 { + return 0 +} + +func (any *invalidAny) ToFloat32() float32 { + return 0 +} + +func (any *invalidAny) ToFloat64() float64 { + return 0 +} + +func (any *invalidAny) ToString() string { + return "" +} + +func (any *invalidAny) WriteTo(stream *Stream) { +} + +func (any *invalidAny) Get(path ...interface{}) Any { + if any.err == nil { + return &invalidAny{baseAny{}, fmt.Errorf("get %v from invalid", path)} + } + return &invalidAny{baseAny{}, fmt.Errorf("%v, get %v from invalid", any.err, path)} +} + +func (any *invalidAny) Parse() *Iterator { + return nil +} + +func (any *invalidAny) GetInterface() interface{} { + return nil +} diff --git a/vendor/github.com/json-iterator/go/any_nil.go b/vendor/github.com/json-iterator/go/any_nil.go new file mode 100644 index 0000000000..d04cb54c11 --- /dev/null +++ b/vendor/github.com/json-iterator/go/any_nil.go @@ -0,0 +1,69 @@ +package jsoniter + +type nilAny struct { + baseAny +} + +func (any *nilAny) LastError() error { + return nil +} + +func (any *nilAny) ValueType() ValueType { + return NilValue +} + +func (any *nilAny) MustBeValid() Any { + return any +} + +func (any *nilAny) ToBool() bool { + return false +} + +func (any *nilAny) ToInt() int { + return 0 +} + +func (any *nilAny) ToInt32() int32 { + return 0 +} + +func (any *nilAny) ToInt64() int64 { + return 0 +} + +func (any *nilAny) ToUint() uint { + return 0 +} + +func (any *nilAny) ToUint32() uint32 { + return 0 +} + +func (any *nilAny) ToUint64() uint64 { + return 0 +} + +func (any *nilAny) ToFloat32() float32 { + return 0 +} + +func (any *nilAny) ToFloat64() float64 { + return 0 +} + +func (any *nilAny) ToString() string { + return "" +} + +func (any *nilAny) WriteTo(stream *Stream) { + stream.WriteNil() +} + +func (any *nilAny) Parse() *Iterator { + return nil +} + +func (any *nilAny) GetInterface() interface{} { + return nil +} diff --git a/vendor/github.com/json-iterator/go/any_number.go b/vendor/github.com/json-iterator/go/any_number.go new file mode 100644 index 0000000000..9d1e901a66 --- /dev/null +++ b/vendor/github.com/json-iterator/go/any_number.go @@ -0,0 +1,123 @@ +package jsoniter + +import ( + "io" + "unsafe" +) + +type numberLazyAny struct { + baseAny + cfg *frozenConfig + buf []byte + err error +} + +func (any *numberLazyAny) ValueType() ValueType { + return NumberValue +} + +func (any *numberLazyAny) MustBeValid() Any { + return any +} + +func (any 
*numberLazyAny) LastError() error { + return any.err +} + +func (any *numberLazyAny) ToBool() bool { + return any.ToFloat64() != 0 +} + +func (any *numberLazyAny) ToInt() int { + iter := any.cfg.BorrowIterator(any.buf) + defer any.cfg.ReturnIterator(iter) + val := iter.ReadInt() + if iter.Error != nil && iter.Error != io.EOF { + any.err = iter.Error + } + return val +} + +func (any *numberLazyAny) ToInt32() int32 { + iter := any.cfg.BorrowIterator(any.buf) + defer any.cfg.ReturnIterator(iter) + val := iter.ReadInt32() + if iter.Error != nil && iter.Error != io.EOF { + any.err = iter.Error + } + return val +} + +func (any *numberLazyAny) ToInt64() int64 { + iter := any.cfg.BorrowIterator(any.buf) + defer any.cfg.ReturnIterator(iter) + val := iter.ReadInt64() + if iter.Error != nil && iter.Error != io.EOF { + any.err = iter.Error + } + return val +} + +func (any *numberLazyAny) ToUint() uint { + iter := any.cfg.BorrowIterator(any.buf) + defer any.cfg.ReturnIterator(iter) + val := iter.ReadUint() + if iter.Error != nil && iter.Error != io.EOF { + any.err = iter.Error + } + return val +} + +func (any *numberLazyAny) ToUint32() uint32 { + iter := any.cfg.BorrowIterator(any.buf) + defer any.cfg.ReturnIterator(iter) + val := iter.ReadUint32() + if iter.Error != nil && iter.Error != io.EOF { + any.err = iter.Error + } + return val +} + +func (any *numberLazyAny) ToUint64() uint64 { + iter := any.cfg.BorrowIterator(any.buf) + defer any.cfg.ReturnIterator(iter) + val := iter.ReadUint64() + if iter.Error != nil && iter.Error != io.EOF { + any.err = iter.Error + } + return val +} + +func (any *numberLazyAny) ToFloat32() float32 { + iter := any.cfg.BorrowIterator(any.buf) + defer any.cfg.ReturnIterator(iter) + val := iter.ReadFloat32() + if iter.Error != nil && iter.Error != io.EOF { + any.err = iter.Error + } + return val +} + +func (any *numberLazyAny) ToFloat64() float64 { + iter := any.cfg.BorrowIterator(any.buf) + defer any.cfg.ReturnIterator(iter) + val := iter.ReadFloat64() + if iter.Error != nil && iter.Error != io.EOF { + any.err = iter.Error + } + return val +} + +func (any *numberLazyAny) ToString() string { + return *(*string)(unsafe.Pointer(&any.buf)) +} + +func (any *numberLazyAny) WriteTo(stream *Stream) { + stream.Write(any.buf) +} + +func (any *numberLazyAny) GetInterface() interface{} { + iter := any.cfg.BorrowIterator(any.buf) + defer any.cfg.ReturnIterator(iter) + return iter.Read() +} diff --git a/vendor/github.com/json-iterator/go/any_object.go b/vendor/github.com/json-iterator/go/any_object.go new file mode 100644 index 0000000000..c44ef5c989 --- /dev/null +++ b/vendor/github.com/json-iterator/go/any_object.go @@ -0,0 +1,374 @@ +package jsoniter + +import ( + "reflect" + "unsafe" +) + +type objectLazyAny struct { + baseAny + cfg *frozenConfig + buf []byte + err error +} + +func (any *objectLazyAny) ValueType() ValueType { + return ObjectValue +} + +func (any *objectLazyAny) MustBeValid() Any { + return any +} + +func (any *objectLazyAny) LastError() error { + return any.err +} + +func (any *objectLazyAny) ToBool() bool { + return true +} + +func (any *objectLazyAny) ToInt() int { + return 0 +} + +func (any *objectLazyAny) ToInt32() int32 { + return 0 +} + +func (any *objectLazyAny) ToInt64() int64 { + return 0 +} + +func (any *objectLazyAny) ToUint() uint { + return 0 +} + +func (any *objectLazyAny) ToUint32() uint32 { + return 0 +} + +func (any *objectLazyAny) ToUint64() uint64 { + return 0 +} + +func (any *objectLazyAny) ToFloat32() float32 { + return 0 +} + +func (any 
*objectLazyAny) ToFloat64() float64 { + return 0 +} + +func (any *objectLazyAny) ToString() string { + return *(*string)(unsafe.Pointer(&any.buf)) +} + +func (any *objectLazyAny) ToVal(obj interface{}) { + iter := any.cfg.BorrowIterator(any.buf) + defer any.cfg.ReturnIterator(iter) + iter.ReadVal(obj) +} + +func (any *objectLazyAny) Get(path ...interface{}) Any { + if len(path) == 0 { + return any + } + switch firstPath := path[0].(type) { + case string: + iter := any.cfg.BorrowIterator(any.buf) + defer any.cfg.ReturnIterator(iter) + valueBytes := locateObjectField(iter, firstPath) + if valueBytes == nil { + return newInvalidAny(path) + } + iter.ResetBytes(valueBytes) + return locatePath(iter, path[1:]) + case int32: + if '*' == firstPath { + mappedAll := map[string]Any{} + iter := any.cfg.BorrowIterator(any.buf) + defer any.cfg.ReturnIterator(iter) + iter.ReadMapCB(func(iter *Iterator, field string) bool { + mapped := locatePath(iter, path[1:]) + if mapped.ValueType() != InvalidValue { + mappedAll[field] = mapped + } + return true + }) + return wrapMap(mappedAll) + } + return newInvalidAny(path) + default: + return newInvalidAny(path) + } +} + +func (any *objectLazyAny) Keys() []string { + keys := []string{} + iter := any.cfg.BorrowIterator(any.buf) + defer any.cfg.ReturnIterator(iter) + iter.ReadMapCB(func(iter *Iterator, field string) bool { + iter.Skip() + keys = append(keys, field) + return true + }) + return keys +} + +func (any *objectLazyAny) Size() int { + size := 0 + iter := any.cfg.BorrowIterator(any.buf) + defer any.cfg.ReturnIterator(iter) + iter.ReadObjectCB(func(iter *Iterator, field string) bool { + iter.Skip() + size++ + return true + }) + return size +} + +func (any *objectLazyAny) WriteTo(stream *Stream) { + stream.Write(any.buf) +} + +func (any *objectLazyAny) GetInterface() interface{} { + iter := any.cfg.BorrowIterator(any.buf) + defer any.cfg.ReturnIterator(iter) + return iter.Read() +} + +type objectAny struct { + baseAny + err error + val reflect.Value +} + +func wrapStruct(val interface{}) *objectAny { + return &objectAny{baseAny{}, nil, reflect.ValueOf(val)} +} + +func (any *objectAny) ValueType() ValueType { + return ObjectValue +} + +func (any *objectAny) MustBeValid() Any { + return any +} + +func (any *objectAny) Parse() *Iterator { + return nil +} + +func (any *objectAny) LastError() error { + return any.err +} + +func (any *objectAny) ToBool() bool { + return any.val.NumField() != 0 +} + +func (any *objectAny) ToInt() int { + return 0 +} + +func (any *objectAny) ToInt32() int32 { + return 0 +} + +func (any *objectAny) ToInt64() int64 { + return 0 +} + +func (any *objectAny) ToUint() uint { + return 0 +} + +func (any *objectAny) ToUint32() uint32 { + return 0 +} + +func (any *objectAny) ToUint64() uint64 { + return 0 +} + +func (any *objectAny) ToFloat32() float32 { + return 0 +} + +func (any *objectAny) ToFloat64() float64 { + return 0 +} + +func (any *objectAny) ToString() string { + str, err := MarshalToString(any.val.Interface()) + any.err = err + return str +} + +func (any *objectAny) Get(path ...interface{}) Any { + if len(path) == 0 { + return any + } + switch firstPath := path[0].(type) { + case string: + field := any.val.FieldByName(firstPath) + if !field.IsValid() { + return newInvalidAny(path) + } + return Wrap(field.Interface()) + case int32: + if '*' == firstPath { + mappedAll := map[string]Any{} + for i := 0; i < any.val.NumField(); i++ { + field := any.val.Field(i) + if field.CanInterface() { + mapped := 
Wrap(field.Interface()).Get(path[1:]...) + if mapped.ValueType() != InvalidValue { + mappedAll[any.val.Type().Field(i).Name] = mapped + } + } + } + return wrapMap(mappedAll) + } + return newInvalidAny(path) + default: + return newInvalidAny(path) + } +} + +func (any *objectAny) Keys() []string { + keys := make([]string, 0, any.val.NumField()) + for i := 0; i < any.val.NumField(); i++ { + keys = append(keys, any.val.Type().Field(i).Name) + } + return keys +} + +func (any *objectAny) Size() int { + return any.val.NumField() +} + +func (any *objectAny) WriteTo(stream *Stream) { + stream.WriteVal(any.val) +} + +func (any *objectAny) GetInterface() interface{} { + return any.val.Interface() +} + +type mapAny struct { + baseAny + err error + val reflect.Value +} + +func wrapMap(val interface{}) *mapAny { + return &mapAny{baseAny{}, nil, reflect.ValueOf(val)} +} + +func (any *mapAny) ValueType() ValueType { + return ObjectValue +} + +func (any *mapAny) MustBeValid() Any { + return any +} + +func (any *mapAny) Parse() *Iterator { + return nil +} + +func (any *mapAny) LastError() error { + return any.err +} + +func (any *mapAny) ToBool() bool { + return true +} + +func (any *mapAny) ToInt() int { + return 0 +} + +func (any *mapAny) ToInt32() int32 { + return 0 +} + +func (any *mapAny) ToInt64() int64 { + return 0 +} + +func (any *mapAny) ToUint() uint { + return 0 +} + +func (any *mapAny) ToUint32() uint32 { + return 0 +} + +func (any *mapAny) ToUint64() uint64 { + return 0 +} + +func (any *mapAny) ToFloat32() float32 { + return 0 +} + +func (any *mapAny) ToFloat64() float64 { + return 0 +} + +func (any *mapAny) ToString() string { + str, err := MarshalToString(any.val.Interface()) + any.err = err + return str +} + +func (any *mapAny) Get(path ...interface{}) Any { + if len(path) == 0 { + return any + } + switch firstPath := path[0].(type) { + case int32: + if '*' == firstPath { + mappedAll := map[string]Any{} + for _, key := range any.val.MapKeys() { + keyAsStr := key.String() + element := Wrap(any.val.MapIndex(key).Interface()) + mapped := element.Get(path[1:]...) 
+ if mapped.ValueType() != InvalidValue { + mappedAll[keyAsStr] = mapped + } + } + return wrapMap(mappedAll) + } + return newInvalidAny(path) + default: + value := any.val.MapIndex(reflect.ValueOf(firstPath)) + if !value.IsValid() { + return newInvalidAny(path) + } + return Wrap(value.Interface()) + } +} + +func (any *mapAny) Keys() []string { + keys := make([]string, 0, any.val.Len()) + for _, key := range any.val.MapKeys() { + keys = append(keys, key.String()) + } + return keys +} + +func (any *mapAny) Size() int { + return any.val.Len() +} + +func (any *mapAny) WriteTo(stream *Stream) { + stream.WriteVal(any.val) +} + +func (any *mapAny) GetInterface() interface{} { + return any.val.Interface() +} diff --git a/vendor/github.com/json-iterator/go/any_str.go b/vendor/github.com/json-iterator/go/any_str.go new file mode 100644 index 0000000000..a4b93c78c8 --- /dev/null +++ b/vendor/github.com/json-iterator/go/any_str.go @@ -0,0 +1,166 @@ +package jsoniter + +import ( + "fmt" + "strconv" +) + +type stringAny struct { + baseAny + val string +} + +func (any *stringAny) Get(path ...interface{}) Any { + if len(path) == 0 { + return any + } + return &invalidAny{baseAny{}, fmt.Errorf("GetIndex %v from simple value", path)} +} + +func (any *stringAny) Parse() *Iterator { + return nil +} + +func (any *stringAny) ValueType() ValueType { + return StringValue +} + +func (any *stringAny) MustBeValid() Any { + return any +} + +func (any *stringAny) LastError() error { + return nil +} + +func (any *stringAny) ToBool() bool { + str := any.ToString() + if str == "0" { + return false + } + for _, c := range str { + switch c { + case ' ', '\n', '\r', '\t': + default: + return true + } + } + return false +} + +func (any *stringAny) ToInt() int { + return int(any.ToInt64()) + +} + +func (any *stringAny) ToInt32() int32 { + return int32(any.ToInt64()) +} + +func (any *stringAny) ToInt64() int64 { + if any.val == "" { + return 0 + } + + flag := 1 + startPos := 0 + endPos := 0 + if any.val[0] == '+' || any.val[0] == '-' { + startPos = 1 + } + + if any.val[0] == '-' { + flag = -1 + } + + for i := startPos; i < len(any.val); i++ { + if any.val[i] >= '0' && any.val[i] <= '9' { + endPos = i + 1 + } else { + break + } + } + parsed, _ := strconv.ParseInt(any.val[startPos:endPos], 10, 64) + return int64(flag) * parsed +} + +func (any *stringAny) ToUint() uint { + return uint(any.ToUint64()) +} + +func (any *stringAny) ToUint32() uint32 { + return uint32(any.ToUint64()) +} + +func (any *stringAny) ToUint64() uint64 { + if any.val == "" { + return 0 + } + + startPos := 0 + endPos := 0 + + if any.val[0] == '-' { + return 0 + } + if any.val[0] == '+' { + startPos = 1 + } + + for i := startPos; i < len(any.val); i++ { + if any.val[i] >= '0' && any.val[i] <= '9' { + endPos = i + 1 + } else { + break + } + } + parsed, _ := strconv.ParseUint(any.val[startPos:endPos], 10, 64) + return parsed +} + +func (any *stringAny) ToFloat32() float32 { + return float32(any.ToFloat64()) +} + +func (any *stringAny) ToFloat64() float64 { + if len(any.val) == 0 { + return 0 + } + + // first char invalid + if any.val[0] != '+' && any.val[0] != '-' && (any.val[0] > '9' || any.val[0] < '0') { + return 0 + } + + // extract valid num expression from string + // eg 123true => 123, -12.12xxa => -12.12 + endPos := 1 + for i := 1; i < len(any.val); i++ { + if any.val[i] == '.' 
|| any.val[i] == 'e' || any.val[i] == 'E' || any.val[i] == '+' || any.val[i] == '-' { + endPos = i + 1 + continue + } + + // end position is the first char which is not digit + if any.val[i] >= '0' && any.val[i] <= '9' { + endPos = i + 1 + } else { + endPos = i + break + } + } + parsed, _ := strconv.ParseFloat(any.val[:endPos], 64) + return parsed +} + +func (any *stringAny) ToString() string { + return any.val +} + +func (any *stringAny) WriteTo(stream *Stream) { + stream.WriteString(any.val) +} + +func (any *stringAny) GetInterface() interface{} { + return any.val +} diff --git a/vendor/github.com/json-iterator/go/any_uint32.go b/vendor/github.com/json-iterator/go/any_uint32.go new file mode 100644 index 0000000000..656bbd33d7 --- /dev/null +++ b/vendor/github.com/json-iterator/go/any_uint32.go @@ -0,0 +1,74 @@ +package jsoniter + +import ( + "strconv" +) + +type uint32Any struct { + baseAny + val uint32 +} + +func (any *uint32Any) LastError() error { + return nil +} + +func (any *uint32Any) ValueType() ValueType { + return NumberValue +} + +func (any *uint32Any) MustBeValid() Any { + return any +} + +func (any *uint32Any) ToBool() bool { + return any.val != 0 +} + +func (any *uint32Any) ToInt() int { + return int(any.val) +} + +func (any *uint32Any) ToInt32() int32 { + return int32(any.val) +} + +func (any *uint32Any) ToInt64() int64 { + return int64(any.val) +} + +func (any *uint32Any) ToUint() uint { + return uint(any.val) +} + +func (any *uint32Any) ToUint32() uint32 { + return any.val +} + +func (any *uint32Any) ToUint64() uint64 { + return uint64(any.val) +} + +func (any *uint32Any) ToFloat32() float32 { + return float32(any.val) +} + +func (any *uint32Any) ToFloat64() float64 { + return float64(any.val) +} + +func (any *uint32Any) ToString() string { + return strconv.FormatInt(int64(any.val), 10) +} + +func (any *uint32Any) WriteTo(stream *Stream) { + stream.WriteUint32(any.val) +} + +func (any *uint32Any) Parse() *Iterator { + return nil +} + +func (any *uint32Any) GetInterface() interface{} { + return any.val +} diff --git a/vendor/github.com/json-iterator/go/any_uint64.go b/vendor/github.com/json-iterator/go/any_uint64.go new file mode 100644 index 0000000000..7df2fce33b --- /dev/null +++ b/vendor/github.com/json-iterator/go/any_uint64.go @@ -0,0 +1,74 @@ +package jsoniter + +import ( + "strconv" +) + +type uint64Any struct { + baseAny + val uint64 +} + +func (any *uint64Any) LastError() error { + return nil +} + +func (any *uint64Any) ValueType() ValueType { + return NumberValue +} + +func (any *uint64Any) MustBeValid() Any { + return any +} + +func (any *uint64Any) ToBool() bool { + return any.val != 0 +} + +func (any *uint64Any) ToInt() int { + return int(any.val) +} + +func (any *uint64Any) ToInt32() int32 { + return int32(any.val) +} + +func (any *uint64Any) ToInt64() int64 { + return int64(any.val) +} + +func (any *uint64Any) ToUint() uint { + return uint(any.val) +} + +func (any *uint64Any) ToUint32() uint32 { + return uint32(any.val) +} + +func (any *uint64Any) ToUint64() uint64 { + return any.val +} + +func (any *uint64Any) ToFloat32() float32 { + return float32(any.val) +} + +func (any *uint64Any) ToFloat64() float64 { + return float64(any.val) +} + +func (any *uint64Any) ToString() string { + return strconv.FormatUint(any.val, 10) +} + +func (any *uint64Any) WriteTo(stream *Stream) { + stream.WriteUint64(any.val) +} + +func (any *uint64Any) Parse() *Iterator { + return nil +} + +func (any *uint64Any) GetInterface() interface{} { + return any.val +} diff --git 
a/vendor/github.com/json-iterator/go/build.sh b/vendor/github.com/json-iterator/go/build.sh new file mode 100755 index 0000000000..b45ef68831 --- /dev/null +++ b/vendor/github.com/json-iterator/go/build.sh @@ -0,0 +1,12 @@ +#!/bin/bash +set -e +set -x + +if [ ! -d /tmp/build-golang/src/github.com/json-iterator ]; then + mkdir -p /tmp/build-golang/src/github.com/json-iterator + ln -s $PWD /tmp/build-golang/src/github.com/json-iterator/go +fi +export GOPATH=/tmp/build-golang +go get -u github.com/golang/dep/cmd/dep +cd /tmp/build-golang/src/github.com/json-iterator/go +exec $GOPATH/bin/dep ensure -update diff --git a/vendor/github.com/json-iterator/go/config.go b/vendor/github.com/json-iterator/go/config.go new file mode 100644 index 0000000000..835819129c --- /dev/null +++ b/vendor/github.com/json-iterator/go/config.go @@ -0,0 +1,372 @@ +package jsoniter + +import ( + "encoding/json" + "io" + "reflect" + "sync" + "unsafe" + + "github.com/modern-go/concurrent" + "github.com/modern-go/reflect2" +) + +// Config customize how the API should behave. +// The API is created from Config by Froze. +type Config struct { + IndentionStep int + MarshalFloatWith6Digits bool + EscapeHTML bool + SortMapKeys bool + UseNumber bool + DisallowUnknownFields bool + TagKey string + OnlyTaggedField bool + ValidateJsonRawMessage bool + ObjectFieldMustBeSimpleString bool + CaseSensitive bool +} + +// API the public interface of this package. +// Primary Marshal and Unmarshal. +type API interface { + IteratorPool + StreamPool + MarshalToString(v interface{}) (string, error) + Marshal(v interface{}) ([]byte, error) + MarshalIndent(v interface{}, prefix, indent string) ([]byte, error) + UnmarshalFromString(str string, v interface{}) error + Unmarshal(data []byte, v interface{}) error + Get(data []byte, path ...interface{}) Any + NewEncoder(writer io.Writer) *Encoder + NewDecoder(reader io.Reader) *Decoder + Valid(data []byte) bool + RegisterExtension(extension Extension) + DecoderOf(typ reflect2.Type) ValDecoder + EncoderOf(typ reflect2.Type) ValEncoder +} + +// ConfigDefault the default API +var ConfigDefault = Config{ + EscapeHTML: true, +}.Froze() + +// ConfigCompatibleWithStandardLibrary tries to be 100% compatible with standard library behavior +var ConfigCompatibleWithStandardLibrary = Config{ + EscapeHTML: true, + SortMapKeys: true, + ValidateJsonRawMessage: true, +}.Froze() + +// ConfigFastest marshals float with only 6 digits precision +var ConfigFastest = Config{ + EscapeHTML: false, + MarshalFloatWith6Digits: true, // will lose precession + ObjectFieldMustBeSimpleString: true, // do not unescape object field +}.Froze() + +type frozenConfig struct { + configBeforeFrozen Config + sortMapKeys bool + indentionStep int + objectFieldMustBeSimpleString bool + onlyTaggedField bool + disallowUnknownFields bool + decoderCache *concurrent.Map + encoderCache *concurrent.Map + extensions []Extension + streamPool *sync.Pool + iteratorPool *sync.Pool + caseSensitive bool +} + +func (cfg *frozenConfig) initCache() { + cfg.decoderCache = concurrent.NewMap() + cfg.encoderCache = concurrent.NewMap() +} + +func (cfg *frozenConfig) addDecoderToCache(cacheKey uintptr, decoder ValDecoder) { + cfg.decoderCache.Store(cacheKey, decoder) +} + +func (cfg *frozenConfig) addEncoderToCache(cacheKey uintptr, encoder ValEncoder) { + cfg.encoderCache.Store(cacheKey, encoder) +} + +func (cfg *frozenConfig) getDecoderFromCache(cacheKey uintptr) ValDecoder { + decoder, found := cfg.decoderCache.Load(cacheKey) + if found { + return 
decoder.(ValDecoder) + } + return nil +} + +func (cfg *frozenConfig) getEncoderFromCache(cacheKey uintptr) ValEncoder { + encoder, found := cfg.encoderCache.Load(cacheKey) + if found { + return encoder.(ValEncoder) + } + return nil +} + +var cfgCache = concurrent.NewMap() + +func getFrozenConfigFromCache(cfg Config) *frozenConfig { + obj, found := cfgCache.Load(cfg) + if found { + return obj.(*frozenConfig) + } + return nil +} + +func addFrozenConfigToCache(cfg Config, frozenConfig *frozenConfig) { + cfgCache.Store(cfg, frozenConfig) +} + +// Froze forge API from config +func (cfg Config) Froze() API { + api := &frozenConfig{ + sortMapKeys: cfg.SortMapKeys, + indentionStep: cfg.IndentionStep, + objectFieldMustBeSimpleString: cfg.ObjectFieldMustBeSimpleString, + onlyTaggedField: cfg.OnlyTaggedField, + disallowUnknownFields: cfg.DisallowUnknownFields, + caseSensitive: cfg.CaseSensitive, + } + api.streamPool = &sync.Pool{ + New: func() interface{} { + return NewStream(api, nil, 512) + }, + } + api.iteratorPool = &sync.Pool{ + New: func() interface{} { + return NewIterator(api) + }, + } + api.initCache() + encoderExtension := EncoderExtension{} + decoderExtension := DecoderExtension{} + if cfg.MarshalFloatWith6Digits { + api.marshalFloatWith6Digits(encoderExtension) + } + if cfg.EscapeHTML { + api.escapeHTML(encoderExtension) + } + if cfg.UseNumber { + api.useNumber(decoderExtension) + } + if cfg.ValidateJsonRawMessage { + api.validateJsonRawMessage(encoderExtension) + } + if len(encoderExtension) > 0 { + api.extensions = append(api.extensions, encoderExtension) + } + if len(decoderExtension) > 0 { + api.extensions = append(api.extensions, decoderExtension) + } + api.configBeforeFrozen = cfg + return api +} + +func (cfg Config) frozeWithCacheReuse() *frozenConfig { + api := getFrozenConfigFromCache(cfg) + if api != nil { + return api + } + api = cfg.Froze().(*frozenConfig) + addFrozenConfigToCache(cfg, api) + return api +} + +func (cfg *frozenConfig) validateJsonRawMessage(extension EncoderExtension) { + encoder := &funcEncoder{func(ptr unsafe.Pointer, stream *Stream) { + rawMessage := *(*json.RawMessage)(ptr) + iter := cfg.BorrowIterator([]byte(rawMessage)) + iter.Read() + if iter.Error != nil { + stream.WriteRaw("null") + } else { + cfg.ReturnIterator(iter) + stream.WriteRaw(string(rawMessage)) + } + }, func(ptr unsafe.Pointer) bool { + return false + }} + extension[reflect2.TypeOfPtr((*json.RawMessage)(nil)).Elem()] = encoder + extension[reflect2.TypeOfPtr((*RawMessage)(nil)).Elem()] = encoder +} + +func (cfg *frozenConfig) useNumber(extension DecoderExtension) { + extension[reflect2.TypeOfPtr((*interface{})(nil)).Elem()] = &funcDecoder{func(ptr unsafe.Pointer, iter *Iterator) { + exitingValue := *((*interface{})(ptr)) + if exitingValue != nil && reflect.TypeOf(exitingValue).Kind() == reflect.Ptr { + iter.ReadVal(exitingValue) + return + } + if iter.WhatIsNext() == NumberValue { + *((*interface{})(ptr)) = json.Number(iter.readNumberAsString()) + } else { + *((*interface{})(ptr)) = iter.Read() + } + }} +} +func (cfg *frozenConfig) getTagKey() string { + tagKey := cfg.configBeforeFrozen.TagKey + if tagKey == "" { + return "json" + } + return tagKey +} + +func (cfg *frozenConfig) RegisterExtension(extension Extension) { + cfg.extensions = append(cfg.extensions, extension) +} + +type lossyFloat32Encoder struct { +} + +func (encoder *lossyFloat32Encoder) Encode(ptr unsafe.Pointer, stream *Stream) { + stream.WriteFloat32Lossy(*((*float32)(ptr))) +} + +func (encoder *lossyFloat32Encoder) 
IsEmpty(ptr unsafe.Pointer) bool { + return *((*float32)(ptr)) == 0 +} + +type lossyFloat64Encoder struct { +} + +func (encoder *lossyFloat64Encoder) Encode(ptr unsafe.Pointer, stream *Stream) { + stream.WriteFloat64Lossy(*((*float64)(ptr))) +} + +func (encoder *lossyFloat64Encoder) IsEmpty(ptr unsafe.Pointer) bool { + return *((*float64)(ptr)) == 0 +} + +// EnableLossyFloatMarshalling keeps 10**(-6) precision +// for float variables for better performance. +func (cfg *frozenConfig) marshalFloatWith6Digits(extension EncoderExtension) { + // for better performance + extension[reflect2.TypeOfPtr((*float32)(nil)).Elem()] = &lossyFloat32Encoder{} + extension[reflect2.TypeOfPtr((*float64)(nil)).Elem()] = &lossyFloat64Encoder{} +} + +type htmlEscapedStringEncoder struct { +} + +func (encoder *htmlEscapedStringEncoder) Encode(ptr unsafe.Pointer, stream *Stream) { + str := *((*string)(ptr)) + stream.WriteStringWithHTMLEscaped(str) +} + +func (encoder *htmlEscapedStringEncoder) IsEmpty(ptr unsafe.Pointer) bool { + return *((*string)(ptr)) == "" +} + +func (cfg *frozenConfig) escapeHTML(encoderExtension EncoderExtension) { + encoderExtension[reflect2.TypeOfPtr((*string)(nil)).Elem()] = &htmlEscapedStringEncoder{} +} + +func (cfg *frozenConfig) cleanDecoders() { + typeDecoders = map[string]ValDecoder{} + fieldDecoders = map[string]ValDecoder{} + *cfg = *(cfg.configBeforeFrozen.Froze().(*frozenConfig)) +} + +func (cfg *frozenConfig) cleanEncoders() { + typeEncoders = map[string]ValEncoder{} + fieldEncoders = map[string]ValEncoder{} + *cfg = *(cfg.configBeforeFrozen.Froze().(*frozenConfig)) +} + +func (cfg *frozenConfig) MarshalToString(v interface{}) (string, error) { + stream := cfg.BorrowStream(nil) + defer cfg.ReturnStream(stream) + stream.WriteVal(v) + if stream.Error != nil { + return "", stream.Error + } + return string(stream.Buffer()), nil +} + +func (cfg *frozenConfig) Marshal(v interface{}) ([]byte, error) { + stream := cfg.BorrowStream(nil) + defer cfg.ReturnStream(stream) + stream.WriteVal(v) + if stream.Error != nil { + return nil, stream.Error + } + result := stream.Buffer() + copied := make([]byte, len(result)) + copy(copied, result) + return copied, nil +} + +func (cfg *frozenConfig) MarshalIndent(v interface{}, prefix, indent string) ([]byte, error) { + if prefix != "" { + panic("prefix is not supported") + } + for _, r := range indent { + if r != ' ' { + panic("indent can only be space") + } + } + newCfg := cfg.configBeforeFrozen + newCfg.IndentionStep = len(indent) + return newCfg.frozeWithCacheReuse().Marshal(v) +} + +func (cfg *frozenConfig) UnmarshalFromString(str string, v interface{}) error { + data := []byte(str) + iter := cfg.BorrowIterator(data) + defer cfg.ReturnIterator(iter) + iter.ReadVal(v) + c := iter.nextToken() + if c == 0 { + if iter.Error == io.EOF { + return nil + } + return iter.Error + } + iter.ReportError("Unmarshal", "there are bytes left after unmarshal") + return iter.Error +} + +func (cfg *frozenConfig) Get(data []byte, path ...interface{}) Any { + iter := cfg.BorrowIterator(data) + defer cfg.ReturnIterator(iter) + return locatePath(iter, path) +} + +func (cfg *frozenConfig) Unmarshal(data []byte, v interface{}) error { + iter := cfg.BorrowIterator(data) + defer cfg.ReturnIterator(iter) + iter.ReadVal(v) + c := iter.nextToken() + if c == 0 { + if iter.Error == io.EOF { + return nil + } + return iter.Error + } + iter.ReportError("Unmarshal", "there are bytes left after unmarshal") + return iter.Error +} + +func (cfg *frozenConfig) NewEncoder(writer 
io.Writer) *Encoder { + stream := NewStream(cfg, writer, 512) + return &Encoder{stream} +} + +func (cfg *frozenConfig) NewDecoder(reader io.Reader) *Decoder { + iter := Parse(cfg, reader, 512) + return &Decoder{iter} +} + +func (cfg *frozenConfig) Valid(data []byte) bool { + iter := cfg.BorrowIterator(data) + defer cfg.ReturnIterator(iter) + iter.Skip() + return iter.Error == nil +} diff --git a/vendor/github.com/json-iterator/go/fuzzy_mode_convert_table.md b/vendor/github.com/json-iterator/go/fuzzy_mode_convert_table.md new file mode 100644 index 0000000000..3095662b06 --- /dev/null +++ b/vendor/github.com/json-iterator/go/fuzzy_mode_convert_table.md @@ -0,0 +1,7 @@ +| json type \ dest type | bool | int | uint | float |string| +| --- | --- | --- | --- |--|--| +| number | positive => true
<br/> negative => true <br/> zero => false| 23.2 => 23 <br/> -32.1 => -32| 12.1 => 12 <br/> -12.1 => 0|as normal|same as origin|
+| string | empty string => false <br/> string "0" => false <br/> other strings => true | "123.32" => 123 <br/> "-123.4" => -123 <br/> "123.23xxxw" => 123 <br/> "abcde12" => 0 <br/> "-32.1" => -32| 13.2 => 13 <br/> -1.1 => 0 |12.1 => 12.1 <br/> -12.3 => -12.3 <br/> 12.4xxa => 12.4 <br/> +1.1e2 =>110 |same as origin|
+| bool | true => true <br/> false => false| true => 1 <br/> false => 0 | true => 1 <br/> false => 0 |true => 1 <br/> false => 0|true => "true" <br/> false => "false"|
+| object | true | 0 | 0 |0|original json|
+| array | empty array => false <br/> nonempty array => true| [] => 0 <br/> [1,2] => 1 | [] => 0 <br/> [1,2] => 1 |[] => 0 <br/>
[1,2] => 1|original json| \ No newline at end of file diff --git a/vendor/github.com/json-iterator/go/iter.go b/vendor/github.com/json-iterator/go/iter.go new file mode 100644 index 0000000000..95ae54fbfe --- /dev/null +++ b/vendor/github.com/json-iterator/go/iter.go @@ -0,0 +1,322 @@ +package jsoniter + +import ( + "encoding/json" + "fmt" + "io" +) + +// ValueType the type for JSON element +type ValueType int + +const ( + // InvalidValue invalid JSON element + InvalidValue ValueType = iota + // StringValue JSON element "string" + StringValue + // NumberValue JSON element 100 or 0.10 + NumberValue + // NilValue JSON element null + NilValue + // BoolValue JSON element true or false + BoolValue + // ArrayValue JSON element [] + ArrayValue + // ObjectValue JSON element {} + ObjectValue +) + +var hexDigits []byte +var valueTypes []ValueType + +func init() { + hexDigits = make([]byte, 256) + for i := 0; i < len(hexDigits); i++ { + hexDigits[i] = 255 + } + for i := '0'; i <= '9'; i++ { + hexDigits[i] = byte(i - '0') + } + for i := 'a'; i <= 'f'; i++ { + hexDigits[i] = byte((i - 'a') + 10) + } + for i := 'A'; i <= 'F'; i++ { + hexDigits[i] = byte((i - 'A') + 10) + } + valueTypes = make([]ValueType, 256) + for i := 0; i < len(valueTypes); i++ { + valueTypes[i] = InvalidValue + } + valueTypes['"'] = StringValue + valueTypes['-'] = NumberValue + valueTypes['0'] = NumberValue + valueTypes['1'] = NumberValue + valueTypes['2'] = NumberValue + valueTypes['3'] = NumberValue + valueTypes['4'] = NumberValue + valueTypes['5'] = NumberValue + valueTypes['6'] = NumberValue + valueTypes['7'] = NumberValue + valueTypes['8'] = NumberValue + valueTypes['9'] = NumberValue + valueTypes['t'] = BoolValue + valueTypes['f'] = BoolValue + valueTypes['n'] = NilValue + valueTypes['['] = ArrayValue + valueTypes['{'] = ObjectValue +} + +// Iterator is a io.Reader like object, with JSON specific read functions. +// Error is not returned as return value, but stored as Error member on this iterator instance. 
+type Iterator struct { + cfg *frozenConfig + reader io.Reader + buf []byte + head int + tail int + captureStartedAt int + captured []byte + Error error + Attachment interface{} // open for customized decoder +} + +// NewIterator creates an empty Iterator instance +func NewIterator(cfg API) *Iterator { + return &Iterator{ + cfg: cfg.(*frozenConfig), + reader: nil, + buf: nil, + head: 0, + tail: 0, + } +} + +// Parse creates an Iterator instance from io.Reader +func Parse(cfg API, reader io.Reader, bufSize int) *Iterator { + return &Iterator{ + cfg: cfg.(*frozenConfig), + reader: reader, + buf: make([]byte, bufSize), + head: 0, + tail: 0, + } +} + +// ParseBytes creates an Iterator instance from byte array +func ParseBytes(cfg API, input []byte) *Iterator { + return &Iterator{ + cfg: cfg.(*frozenConfig), + reader: nil, + buf: input, + head: 0, + tail: len(input), + } +} + +// ParseString creates an Iterator instance from string +func ParseString(cfg API, input string) *Iterator { + return ParseBytes(cfg, []byte(input)) +} + +// Pool returns a pool can provide more iterator with same configuration +func (iter *Iterator) Pool() IteratorPool { + return iter.cfg +} + +// Reset reuse iterator instance by specifying another reader +func (iter *Iterator) Reset(reader io.Reader) *Iterator { + iter.reader = reader + iter.head = 0 + iter.tail = 0 + return iter +} + +// ResetBytes reuse iterator instance by specifying another byte array as input +func (iter *Iterator) ResetBytes(input []byte) *Iterator { + iter.reader = nil + iter.buf = input + iter.head = 0 + iter.tail = len(input) + return iter +} + +// WhatIsNext gets ValueType of relatively next json element +func (iter *Iterator) WhatIsNext() ValueType { + valueType := valueTypes[iter.nextToken()] + iter.unreadByte() + return valueType +} + +func (iter *Iterator) skipWhitespacesWithoutLoadMore() bool { + for i := iter.head; i < iter.tail; i++ { + c := iter.buf[i] + switch c { + case ' ', '\n', '\t', '\r': + continue + } + iter.head = i + return false + } + return true +} + +func (iter *Iterator) isObjectEnd() bool { + c := iter.nextToken() + if c == ',' { + return false + } + if c == '}' { + return true + } + iter.ReportError("isObjectEnd", "object ended prematurely, unexpected char "+string([]byte{c})) + return true +} + +func (iter *Iterator) nextToken() byte { + // a variation of skip whitespaces, returning the next non-whitespace token + for { + for i := iter.head; i < iter.tail; i++ { + c := iter.buf[i] + switch c { + case ' ', '\n', '\t', '\r': + continue + } + iter.head = i + 1 + return c + } + if !iter.loadMore() { + return 0 + } + } +} + +// ReportError record a error in iterator instance with current position. 
+func (iter *Iterator) ReportError(operation string, msg string) { + if iter.Error != nil { + if iter.Error != io.EOF { + return + } + } + peekStart := iter.head - 10 + if peekStart < 0 { + peekStart = 0 + } + peekEnd := iter.head + 10 + if peekEnd > iter.tail { + peekEnd = iter.tail + } + parsing := string(iter.buf[peekStart:peekEnd]) + contextStart := iter.head - 50 + if contextStart < 0 { + contextStart = 0 + } + contextEnd := iter.head + 50 + if contextEnd > iter.tail { + contextEnd = iter.tail + } + context := string(iter.buf[contextStart:contextEnd]) + iter.Error = fmt.Errorf("%s: %s, error found in #%v byte of ...|%s|..., bigger context ...|%s|...", + operation, msg, iter.head-peekStart, parsing, context) +} + +// CurrentBuffer gets current buffer as string for debugging purpose +func (iter *Iterator) CurrentBuffer() string { + peekStart := iter.head - 10 + if peekStart < 0 { + peekStart = 0 + } + return fmt.Sprintf("parsing #%v byte, around ...|%s|..., whole buffer ...|%s|...", iter.head, + string(iter.buf[peekStart:iter.head]), string(iter.buf[0:iter.tail])) +} + +func (iter *Iterator) readByte() (ret byte) { + if iter.head == iter.tail { + if iter.loadMore() { + ret = iter.buf[iter.head] + iter.head++ + return ret + } + return 0 + } + ret = iter.buf[iter.head] + iter.head++ + return ret +} + +func (iter *Iterator) loadMore() bool { + if iter.reader == nil { + if iter.Error == nil { + iter.head = iter.tail + iter.Error = io.EOF + } + return false + } + if iter.captured != nil { + iter.captured = append(iter.captured, + iter.buf[iter.captureStartedAt:iter.tail]...) + iter.captureStartedAt = 0 + } + for { + n, err := iter.reader.Read(iter.buf) + if n == 0 { + if err != nil { + if iter.Error == nil { + iter.Error = err + } + return false + } + } else { + iter.head = 0 + iter.tail = n + return true + } + } +} + +func (iter *Iterator) unreadByte() { + if iter.Error != nil { + return + } + iter.head-- + return +} + +// Read read the next JSON element as generic interface{}. +func (iter *Iterator) Read() interface{} { + valueType := iter.WhatIsNext() + switch valueType { + case StringValue: + return iter.ReadString() + case NumberValue: + if iter.cfg.configBeforeFrozen.UseNumber { + return json.Number(iter.readNumberAsString()) + } + return iter.ReadFloat64() + case NilValue: + iter.skipFourBytes('n', 'u', 'l', 'l') + return nil + case BoolValue: + return iter.ReadBool() + case ArrayValue: + arr := []interface{}{} + iter.ReadArrayCB(func(iter *Iterator) bool { + var elem interface{} + iter.ReadVal(&elem) + arr = append(arr, elem) + return true + }) + return arr + case ObjectValue: + obj := map[string]interface{}{} + iter.ReadMapCB(func(Iter *Iterator, field string) bool { + var elem interface{} + iter.ReadVal(&elem) + obj[field] = elem + return true + }) + return obj + default: + iter.ReportError("Read", fmt.Sprintf("unexpected value type: %v", valueType)) + return nil + } +} diff --git a/vendor/github.com/json-iterator/go/iter_array.go b/vendor/github.com/json-iterator/go/iter_array.go new file mode 100644 index 0000000000..6188cb4577 --- /dev/null +++ b/vendor/github.com/json-iterator/go/iter_array.go @@ -0,0 +1,58 @@ +package jsoniter + +// ReadArray read array element, tells if the array has more element to read. 
+func (iter *Iterator) ReadArray() (ret bool) { + c := iter.nextToken() + switch c { + case 'n': + iter.skipThreeBytes('u', 'l', 'l') + return false // null + case '[': + c = iter.nextToken() + if c != ']' { + iter.unreadByte() + return true + } + return false + case ']': + return false + case ',': + return true + default: + iter.ReportError("ReadArray", "expect [ or , or ] or n, but found "+string([]byte{c})) + return + } +} + +// ReadArrayCB read array with callback +func (iter *Iterator) ReadArrayCB(callback func(*Iterator) bool) (ret bool) { + c := iter.nextToken() + if c == '[' { + c = iter.nextToken() + if c != ']' { + iter.unreadByte() + if !callback(iter) { + return false + } + c = iter.nextToken() + for c == ',' { + if !callback(iter) { + return false + } + c = iter.nextToken() + } + if c != ']' { + iter.ReportError("ReadArrayCB", "expect ] in the end, but found "+string([]byte{c})) + return false + } + return true + } + return true + } + if c == 'n' { + iter.skipThreeBytes('u', 'l', 'l') + return true // null + } + iter.ReportError("ReadArrayCB", "expect [ or n, but found "+string([]byte{c})) + return false +} diff --git a/vendor/github.com/json-iterator/go/iter_float.go b/vendor/github.com/json-iterator/go/iter_float.go new file mode 100644 index 0000000000..4f883c0959 --- /dev/null +++ b/vendor/github.com/json-iterator/go/iter_float.go @@ -0,0 +1,347 @@ +package jsoniter + +import ( + "encoding/json" + "io" + "math/big" + "strconv" + "strings" + "unsafe" +) + +var floatDigits []int8 + +const invalidCharForNumber = int8(-1) +const endOfNumber = int8(-2) +const dotInNumber = int8(-3) + +func init() { + floatDigits = make([]int8, 256) + for i := 0; i < len(floatDigits); i++ { + floatDigits[i] = invalidCharForNumber + } + for i := int8('0'); i <= int8('9'); i++ { + floatDigits[i] = i - int8('0') + } + floatDigits[','] = endOfNumber + floatDigits[']'] = endOfNumber + floatDigits['}'] = endOfNumber + floatDigits[' '] = endOfNumber + floatDigits['\t'] = endOfNumber + floatDigits['\n'] = endOfNumber + floatDigits['.'] = dotInNumber +} + +// ReadBigFloat read big.Float +func (iter *Iterator) ReadBigFloat() (ret *big.Float) { + str := iter.readNumberAsString() + if iter.Error != nil && iter.Error != io.EOF { + return nil + } + prec := 64 + if len(str) > prec { + prec = len(str) + } + val, _, err := big.ParseFloat(str, 10, uint(prec), big.ToZero) + if err != nil { + iter.Error = err + return nil + } + return val +} + +// ReadBigInt read big.Int +func (iter *Iterator) ReadBigInt() (ret *big.Int) { + str := iter.readNumberAsString() + if iter.Error != nil && iter.Error != io.EOF { + return nil + } + ret = big.NewInt(0) + var success bool + ret, success = ret.SetString(str, 10) + if !success { + iter.ReportError("ReadBigInt", "invalid big int") + return nil + } + return ret +} + +//ReadFloat32 read float32 +func (iter *Iterator) ReadFloat32() (ret float32) { + c := iter.nextToken() + if c == '-' { + return -iter.readPositiveFloat32() + } + iter.unreadByte() + return iter.readPositiveFloat32() +} + +func (iter *Iterator) readPositiveFloat32() (ret float32) { + value := uint64(0) + c := byte(' ') + i := iter.head + // first char + if i == iter.tail { + return iter.readFloat32SlowPath() + } + c = iter.buf[i] + i++ + ind := floatDigits[c] + switch ind { + case invalidCharForNumber: + return iter.readFloat32SlowPath() + case endOfNumber: + iter.ReportError("readFloat32", "empty number") + return + case dotInNumber: + iter.ReportError("readFloat32", "leading dot is invalid") + return + case 0: + 
if i == iter.tail { + return iter.readFloat32SlowPath() + } + c = iter.buf[i] + switch c { + case '0', '1', '2', '3', '4', '5', '6', '7', '8', '9': + iter.ReportError("readFloat32", "leading zero is invalid") + return + } + } + value = uint64(ind) + // chars before dot +non_decimal_loop: + for ; i < iter.tail; i++ { + c = iter.buf[i] + ind := floatDigits[c] + switch ind { + case invalidCharForNumber: + return iter.readFloat32SlowPath() + case endOfNumber: + iter.head = i + return float32(value) + case dotInNumber: + break non_decimal_loop + } + if value > uint64SafeToMultiple10 { + return iter.readFloat32SlowPath() + } + value = (value << 3) + (value << 1) + uint64(ind) // value = value * 10 + ind; + } + // chars after dot + if c == '.' { + i++ + decimalPlaces := 0 + if i == iter.tail { + return iter.readFloat32SlowPath() + } + for ; i < iter.tail; i++ { + c = iter.buf[i] + ind := floatDigits[c] + switch ind { + case endOfNumber: + if decimalPlaces > 0 && decimalPlaces < len(pow10) { + iter.head = i + return float32(float64(value) / float64(pow10[decimalPlaces])) + } + // too many decimal places + return iter.readFloat32SlowPath() + case invalidCharForNumber: + fallthrough + case dotInNumber: + return iter.readFloat32SlowPath() + } + decimalPlaces++ + if value > uint64SafeToMultiple10 { + return iter.readFloat32SlowPath() + } + value = (value << 3) + (value << 1) + uint64(ind) + } + } + return iter.readFloat32SlowPath() +} + +func (iter *Iterator) readNumberAsString() (ret string) { + strBuf := [16]byte{} + str := strBuf[0:0] +load_loop: + for { + for i := iter.head; i < iter.tail; i++ { + c := iter.buf[i] + switch c { + case '+', '-', '.', 'e', 'E', '0', '1', '2', '3', '4', '5', '6', '7', '8', '9': + str = append(str, c) + continue + default: + iter.head = i + break load_loop + } + } + if !iter.loadMore() { + break + } + } + if iter.Error != nil && iter.Error != io.EOF { + return + } + if len(str) == 0 { + iter.ReportError("readNumberAsString", "invalid number") + } + return *(*string)(unsafe.Pointer(&str)) +} + +func (iter *Iterator) readFloat32SlowPath() (ret float32) { + str := iter.readNumberAsString() + if iter.Error != nil && iter.Error != io.EOF { + return + } + errMsg := validateFloat(str) + if errMsg != "" { + iter.ReportError("readFloat32SlowPath", errMsg) + return + } + val, err := strconv.ParseFloat(str, 32) + if err != nil { + iter.Error = err + return + } + return float32(val) +} + +// ReadFloat64 read float64 +func (iter *Iterator) ReadFloat64() (ret float64) { + c := iter.nextToken() + if c == '-' { + return -iter.readPositiveFloat64() + } + iter.unreadByte() + return iter.readPositiveFloat64() +} + +func (iter *Iterator) readPositiveFloat64() (ret float64) { + value := uint64(0) + c := byte(' ') + i := iter.head + // first char + if i == iter.tail { + return iter.readFloat64SlowPath() + } + c = iter.buf[i] + i++ + ind := floatDigits[c] + switch ind { + case invalidCharForNumber: + return iter.readFloat64SlowPath() + case endOfNumber: + iter.ReportError("readFloat64", "empty number") + return + case dotInNumber: + iter.ReportError("readFloat64", "leading dot is invalid") + return + case 0: + if i == iter.tail { + return iter.readFloat64SlowPath() + } + c = iter.buf[i] + switch c { + case '0', '1', '2', '3', '4', '5', '6', '7', '8', '9': + iter.ReportError("readFloat64", "leading zero is invalid") + return + } + } + value = uint64(ind) + // chars before dot +non_decimal_loop: + for ; i < iter.tail; i++ { + c = iter.buf[i] + ind := floatDigits[c] + switch ind { + case 
invalidCharForNumber: + return iter.readFloat64SlowPath() + case endOfNumber: + iter.head = i + return float64(value) + case dotInNumber: + break non_decimal_loop + } + if value > uint64SafeToMultiple10 { + return iter.readFloat64SlowPath() + } + value = (value << 3) + (value << 1) + uint64(ind) // value = value * 10 + ind; + } + // chars after dot + if c == '.' { + i++ + decimalPlaces := 0 + if i == iter.tail { + return iter.readFloat64SlowPath() + } + for ; i < iter.tail; i++ { + c = iter.buf[i] + ind := floatDigits[c] + switch ind { + case endOfNumber: + if decimalPlaces > 0 && decimalPlaces < len(pow10) { + iter.head = i + return float64(value) / float64(pow10[decimalPlaces]) + } + // too many decimal places + return iter.readFloat64SlowPath() + case invalidCharForNumber: + fallthrough + case dotInNumber: + return iter.readFloat64SlowPath() + } + decimalPlaces++ + if value > uint64SafeToMultiple10 { + return iter.readFloat64SlowPath() + } + value = (value << 3) + (value << 1) + uint64(ind) + } + } + return iter.readFloat64SlowPath() +} + +func (iter *Iterator) readFloat64SlowPath() (ret float64) { + str := iter.readNumberAsString() + if iter.Error != nil && iter.Error != io.EOF { + return + } + errMsg := validateFloat(str) + if errMsg != "" { + iter.ReportError("readFloat64SlowPath", errMsg) + return + } + val, err := strconv.ParseFloat(str, 64) + if err != nil { + iter.Error = err + return + } + return val +} + +func validateFloat(str string) string { + // strconv.ParseFloat is not validating `1.` or `1.e1` + if len(str) == 0 { + return "empty number" + } + if str[0] == '-' { + return "-- is not valid" + } + dotPos := strings.IndexByte(str, '.') + if dotPos != -1 { + if dotPos == len(str)-1 { + return "dot can not be last character" + } + switch str[dotPos+1] { + case '0', '1', '2', '3', '4', '5', '6', '7', '8', '9': + default: + return "missing digit after dot" + } + } + return "" +} + +// ReadNumber read json.Number +func (iter *Iterator) ReadNumber() (ret json.Number) { + return json.Number(iter.readNumberAsString()) +} diff --git a/vendor/github.com/json-iterator/go/iter_int.go b/vendor/github.com/json-iterator/go/iter_int.go new file mode 100644 index 0000000000..2142320355 --- /dev/null +++ b/vendor/github.com/json-iterator/go/iter_int.go @@ -0,0 +1,345 @@ +package jsoniter + +import ( + "math" + "strconv" +) + +var intDigits []int8 + +const uint32SafeToMultiply10 = uint32(0xffffffff)/10 - 1 +const uint64SafeToMultiple10 = uint64(0xffffffffffffffff)/10 - 1 + +func init() { + intDigits = make([]int8, 256) + for i := 0; i < len(intDigits); i++ { + intDigits[i] = invalidCharForNumber + } + for i := int8('0'); i <= int8('9'); i++ { + intDigits[i] = i - int8('0') + } +} + +// ReadUint read uint +func (iter *Iterator) ReadUint() uint { + if strconv.IntSize == 32 { + return uint(iter.ReadUint32()) + } + return uint(iter.ReadUint64()) +} + +// ReadInt read int +func (iter *Iterator) ReadInt() int { + if strconv.IntSize == 32 { + return int(iter.ReadInt32()) + } + return int(iter.ReadInt64()) +} + +// ReadInt8 read int8 +func (iter *Iterator) ReadInt8() (ret int8) { + c := iter.nextToken() + if c == '-' { + val := iter.readUint32(iter.readByte()) + if val > math.MaxInt8+1 { + iter.ReportError("ReadInt8", "overflow: "+strconv.FormatInt(int64(val), 10)) + return + } + return -int8(val) + } + val := iter.readUint32(c) + if val > math.MaxInt8 { + iter.ReportError("ReadInt8", "overflow: "+strconv.FormatInt(int64(val), 10)) + return + } + return int8(val) +} + +// ReadUint8 read uint8 +func 
(iter *Iterator) ReadUint8() (ret uint8) { + val := iter.readUint32(iter.nextToken()) + if val > math.MaxUint8 { + iter.ReportError("ReadUint8", "overflow: "+strconv.FormatInt(int64(val), 10)) + return + } + return uint8(val) +} + +// ReadInt16 read int16 +func (iter *Iterator) ReadInt16() (ret int16) { + c := iter.nextToken() + if c == '-' { + val := iter.readUint32(iter.readByte()) + if val > math.MaxInt16+1 { + iter.ReportError("ReadInt16", "overflow: "+strconv.FormatInt(int64(val), 10)) + return + } + return -int16(val) + } + val := iter.readUint32(c) + if val > math.MaxInt16 { + iter.ReportError("ReadInt16", "overflow: "+strconv.FormatInt(int64(val), 10)) + return + } + return int16(val) +} + +// ReadUint16 read uint16 +func (iter *Iterator) ReadUint16() (ret uint16) { + val := iter.readUint32(iter.nextToken()) + if val > math.MaxUint16 { + iter.ReportError("ReadUint16", "overflow: "+strconv.FormatInt(int64(val), 10)) + return + } + return uint16(val) +} + +// ReadInt32 read int32 +func (iter *Iterator) ReadInt32() (ret int32) { + c := iter.nextToken() + if c == '-' { + val := iter.readUint32(iter.readByte()) + if val > math.MaxInt32+1 { + iter.ReportError("ReadInt32", "overflow: "+strconv.FormatInt(int64(val), 10)) + return + } + return -int32(val) + } + val := iter.readUint32(c) + if val > math.MaxInt32 { + iter.ReportError("ReadInt32", "overflow: "+strconv.FormatInt(int64(val), 10)) + return + } + return int32(val) +} + +// ReadUint32 read uint32 +func (iter *Iterator) ReadUint32() (ret uint32) { + return iter.readUint32(iter.nextToken()) +} + +func (iter *Iterator) readUint32(c byte) (ret uint32) { + ind := intDigits[c] + if ind == 0 { + iter.assertInteger() + return 0 // single zero + } + if ind == invalidCharForNumber { + iter.ReportError("readUint32", "unexpected character: "+string([]byte{byte(ind)})) + return + } + value := uint32(ind) + if iter.tail-iter.head > 10 { + i := iter.head + ind2 := intDigits[iter.buf[i]] + if ind2 == invalidCharForNumber { + iter.head = i + iter.assertInteger() + return value + } + i++ + ind3 := intDigits[iter.buf[i]] + if ind3 == invalidCharForNumber { + iter.head = i + iter.assertInteger() + return value*10 + uint32(ind2) + } + //iter.head = i + 1 + //value = value * 100 + uint32(ind2) * 10 + uint32(ind3) + i++ + ind4 := intDigits[iter.buf[i]] + if ind4 == invalidCharForNumber { + iter.head = i + iter.assertInteger() + return value*100 + uint32(ind2)*10 + uint32(ind3) + } + i++ + ind5 := intDigits[iter.buf[i]] + if ind5 == invalidCharForNumber { + iter.head = i + iter.assertInteger() + return value*1000 + uint32(ind2)*100 + uint32(ind3)*10 + uint32(ind4) + } + i++ + ind6 := intDigits[iter.buf[i]] + if ind6 == invalidCharForNumber { + iter.head = i + iter.assertInteger() + return value*10000 + uint32(ind2)*1000 + uint32(ind3)*100 + uint32(ind4)*10 + uint32(ind5) + } + i++ + ind7 := intDigits[iter.buf[i]] + if ind7 == invalidCharForNumber { + iter.head = i + iter.assertInteger() + return value*100000 + uint32(ind2)*10000 + uint32(ind3)*1000 + uint32(ind4)*100 + uint32(ind5)*10 + uint32(ind6) + } + i++ + ind8 := intDigits[iter.buf[i]] + if ind8 == invalidCharForNumber { + iter.head = i + iter.assertInteger() + return value*1000000 + uint32(ind2)*100000 + uint32(ind3)*10000 + uint32(ind4)*1000 + uint32(ind5)*100 + uint32(ind6)*10 + uint32(ind7) + } + i++ + ind9 := intDigits[iter.buf[i]] + value = value*10000000 + uint32(ind2)*1000000 + uint32(ind3)*100000 + uint32(ind4)*10000 + uint32(ind5)*1000 + uint32(ind6)*100 + uint32(ind7)*10 + uint32(ind8) + 
iter.head = i + if ind9 == invalidCharForNumber { + iter.assertInteger() + return value + } + } + for { + for i := iter.head; i < iter.tail; i++ { + ind = intDigits[iter.buf[i]] + if ind == invalidCharForNumber { + iter.head = i + iter.assertInteger() + return value + } + if value > uint32SafeToMultiply10 { + value2 := (value << 3) + (value << 1) + uint32(ind) + if value2 < value { + iter.ReportError("readUint32", "overflow") + return + } + value = value2 + continue + } + value = (value << 3) + (value << 1) + uint32(ind) + } + if !iter.loadMore() { + iter.assertInteger() + return value + } + } +} + +// ReadInt64 read int64 +func (iter *Iterator) ReadInt64() (ret int64) { + c := iter.nextToken() + if c == '-' { + val := iter.readUint64(iter.readByte()) + if val > math.MaxInt64+1 { + iter.ReportError("ReadInt64", "overflow: "+strconv.FormatUint(uint64(val), 10)) + return + } + return -int64(val) + } + val := iter.readUint64(c) + if val > math.MaxInt64 { + iter.ReportError("ReadInt64", "overflow: "+strconv.FormatUint(uint64(val), 10)) + return + } + return int64(val) +} + +// ReadUint64 read uint64 +func (iter *Iterator) ReadUint64() uint64 { + return iter.readUint64(iter.nextToken()) +} + +func (iter *Iterator) readUint64(c byte) (ret uint64) { + ind := intDigits[c] + if ind == 0 { + iter.assertInteger() + return 0 // single zero + } + if ind == invalidCharForNumber { + iter.ReportError("readUint64", "unexpected character: "+string([]byte{byte(ind)})) + return + } + value := uint64(ind) + if iter.tail-iter.head > 10 { + i := iter.head + ind2 := intDigits[iter.buf[i]] + if ind2 == invalidCharForNumber { + iter.head = i + iter.assertInteger() + return value + } + i++ + ind3 := intDigits[iter.buf[i]] + if ind3 == invalidCharForNumber { + iter.head = i + iter.assertInteger() + return value*10 + uint64(ind2) + } + //iter.head = i + 1 + //value = value * 100 + uint32(ind2) * 10 + uint32(ind3) + i++ + ind4 := intDigits[iter.buf[i]] + if ind4 == invalidCharForNumber { + iter.head = i + iter.assertInteger() + return value*100 + uint64(ind2)*10 + uint64(ind3) + } + i++ + ind5 := intDigits[iter.buf[i]] + if ind5 == invalidCharForNumber { + iter.head = i + iter.assertInteger() + return value*1000 + uint64(ind2)*100 + uint64(ind3)*10 + uint64(ind4) + } + i++ + ind6 := intDigits[iter.buf[i]] + if ind6 == invalidCharForNumber { + iter.head = i + iter.assertInteger() + return value*10000 + uint64(ind2)*1000 + uint64(ind3)*100 + uint64(ind4)*10 + uint64(ind5) + } + i++ + ind7 := intDigits[iter.buf[i]] + if ind7 == invalidCharForNumber { + iter.head = i + iter.assertInteger() + return value*100000 + uint64(ind2)*10000 + uint64(ind3)*1000 + uint64(ind4)*100 + uint64(ind5)*10 + uint64(ind6) + } + i++ + ind8 := intDigits[iter.buf[i]] + if ind8 == invalidCharForNumber { + iter.head = i + iter.assertInteger() + return value*1000000 + uint64(ind2)*100000 + uint64(ind3)*10000 + uint64(ind4)*1000 + uint64(ind5)*100 + uint64(ind6)*10 + uint64(ind7) + } + i++ + ind9 := intDigits[iter.buf[i]] + value = value*10000000 + uint64(ind2)*1000000 + uint64(ind3)*100000 + uint64(ind4)*10000 + uint64(ind5)*1000 + uint64(ind6)*100 + uint64(ind7)*10 + uint64(ind8) + iter.head = i + if ind9 == invalidCharForNumber { + iter.assertInteger() + return value + } + } + for { + for i := iter.head; i < iter.tail; i++ { + ind = intDigits[iter.buf[i]] + if ind == invalidCharForNumber { + iter.head = i + iter.assertInteger() + return value + } + if value > uint64SafeToMultiple10 { + value2 := (value << 3) + (value << 1) + uint64(ind) + if 
value2 < value { + iter.ReportError("readUint64", "overflow") + return + } + value = value2 + continue + } + value = (value << 3) + (value << 1) + uint64(ind) + } + if !iter.loadMore() { + iter.assertInteger() + return value + } + } +} + +func (iter *Iterator) assertInteger() { + if iter.head < len(iter.buf) && iter.buf[iter.head] == '.' { + iter.ReportError("assertInteger", "can not decode float as int") + } +} diff --git a/vendor/github.com/json-iterator/go/iter_object.go b/vendor/github.com/json-iterator/go/iter_object.go new file mode 100644 index 0000000000..6e7c370abe --- /dev/null +++ b/vendor/github.com/json-iterator/go/iter_object.go @@ -0,0 +1,252 @@ +package jsoniter + +import ( + "fmt" + "unicode" +) + +// ReadObject read one field from object. +// If object ended, returns empty string. +// Otherwise, returns the field name. +func (iter *Iterator) ReadObject() (ret string) { + c := iter.nextToken() + switch c { + case 'n': + iter.skipThreeBytes('u', 'l', 'l') + return "" // null + case '{': + c = iter.nextToken() + if c == '"' { + iter.unreadByte() + field := iter.ReadString() + c = iter.nextToken() + if c != ':' { + iter.ReportError("ReadObject", "expect : after object field, but found "+string([]byte{c})) + } + return field + } + if c == '}' { + return "" // end of object + } + iter.ReportError("ReadObject", `expect " after {, but found `+string([]byte{c})) + return + case ',': + field := iter.ReadString() + c = iter.nextToken() + if c != ':' { + iter.ReportError("ReadObject", "expect : after object field, but found "+string([]byte{c})) + } + return field + case '}': + return "" // end of object + default: + iter.ReportError("ReadObject", fmt.Sprintf(`expect { or , or } or n, but found %s`, string([]byte{c}))) + return + } +} + +// CaseInsensitive +func (iter *Iterator) readFieldHash() int64 { + hash := int64(0x811c9dc5) + c := iter.nextToken() + if c != '"' { + iter.ReportError("readFieldHash", `expect ", but found `+string([]byte{c})) + return 0 + } + for { + for i := iter.head; i < iter.tail; i++ { + // require ascii string and no escape + b := iter.buf[i] + if b == '\\' { + iter.head = i + for _, b := range iter.readStringSlowPath() { + if 'A' <= b && b <= 'Z' && !iter.cfg.caseSensitive { + b += 'a' - 'A' + } + hash ^= int64(b) + hash *= 0x1000193 + } + c = iter.nextToken() + if c != ':' { + iter.ReportError("readFieldHash", `expect :, but found `+string([]byte{c})) + return 0 + } + return hash + } + if b == '"' { + iter.head = i + 1 + c = iter.nextToken() + if c != ':' { + iter.ReportError("readFieldHash", `expect :, but found `+string([]byte{c})) + return 0 + } + return hash + } + if 'A' <= b && b <= 'Z' && !iter.cfg.caseSensitive { + b += 'a' - 'A' + } + hash ^= int64(b) + hash *= 0x1000193 + } + if !iter.loadMore() { + iter.ReportError("readFieldHash", `incomplete field name`) + return 0 + } + } +} + +func calcHash(str string, caseSensitive bool) int64 { + hash := int64(0x811c9dc5) + for _, b := range str { + if caseSensitive { + hash ^= int64(b) + } else { + hash ^= int64(unicode.ToLower(b)) + } + hash *= 0x1000193 + } + return int64(hash) +} + +// ReadObjectCB read object with callback, the key is ascii only and field name not copied +func (iter *Iterator) ReadObjectCB(callback func(*Iterator, string) bool) bool { + c := iter.nextToken() + var field string + if c == '{' { + c = iter.nextToken() + if c == '"' { + iter.unreadByte() + field = iter.ReadString() + c = iter.nextToken() + if c != ':' { + iter.ReportError("ReadObject", "expect : after object field, but 
found "+string([]byte{c})) + } + if !callback(iter, field) { + return false + } + c = iter.nextToken() + for c == ',' { + field = iter.ReadString() + c = iter.nextToken() + if c != ':' { + iter.ReportError("ReadObject", "expect : after object field, but found "+string([]byte{c})) + } + if !callback(iter, field) { + return false + } + c = iter.nextToken() + } + if c != '}' { + iter.ReportError("ReadObjectCB", `object not ended with }`) + return false + } + return true + } + if c == '}' { + return true + } + iter.ReportError("ReadObjectCB", `expect " after }, but found `+string([]byte{c})) + return false + } + if c == 'n' { + iter.skipThreeBytes('u', 'l', 'l') + return true // null + } + iter.ReportError("ReadObjectCB", `expect { or n, but found `+string([]byte{c})) + return false +} + +// ReadMapCB read map with callback, the key can be any string +func (iter *Iterator) ReadMapCB(callback func(*Iterator, string) bool) bool { + c := iter.nextToken() + if c == '{' { + c = iter.nextToken() + if c == '"' { + iter.unreadByte() + field := iter.ReadString() + if iter.nextToken() != ':' { + iter.ReportError("ReadMapCB", "expect : after object field, but found "+string([]byte{c})) + return false + } + if !callback(iter, field) { + return false + } + c = iter.nextToken() + for c == ',' { + field = iter.ReadString() + if iter.nextToken() != ':' { + iter.ReportError("ReadMapCB", "expect : after object field, but found "+string([]byte{c})) + return false + } + if !callback(iter, field) { + return false + } + c = iter.nextToken() + } + if c != '}' { + iter.ReportError("ReadMapCB", `object not ended with }`) + return false + } + return true + } + if c == '}' { + return true + } + iter.ReportError("ReadMapCB", `expect " after }, but found `+string([]byte{c})) + return false + } + if c == 'n' { + iter.skipThreeBytes('u', 'l', 'l') + return true // null + } + iter.ReportError("ReadMapCB", `expect { or n, but found `+string([]byte{c})) + return false +} + +func (iter *Iterator) readObjectStart() bool { + c := iter.nextToken() + if c == '{' { + c = iter.nextToken() + if c == '}' { + return false + } + iter.unreadByte() + return true + } else if c == 'n' { + iter.skipThreeBytes('u', 'l', 'l') + return false + } + iter.ReportError("readObjectStart", "expect { or n, but found "+string([]byte{c})) + return false +} + +func (iter *Iterator) readObjectFieldAsBytes() (ret []byte) { + str := iter.ReadStringAsSlice() + if iter.skipWhitespacesWithoutLoadMore() { + if ret == nil { + ret = make([]byte, len(str)) + copy(ret, str) + } + if !iter.loadMore() { + return + } + } + if iter.buf[iter.head] != ':' { + iter.ReportError("readObjectFieldAsBytes", "expect : after object field, but found "+string([]byte{iter.buf[iter.head]})) + return + } + iter.head++ + if iter.skipWhitespacesWithoutLoadMore() { + if ret == nil { + ret = make([]byte, len(str)) + copy(ret, str) + } + if !iter.loadMore() { + return + } + } + if ret == nil { + return str + } + return ret +} diff --git a/vendor/github.com/json-iterator/go/iter_skip.go b/vendor/github.com/json-iterator/go/iter_skip.go new file mode 100644 index 0000000000..f58beb9137 --- /dev/null +++ b/vendor/github.com/json-iterator/go/iter_skip.go @@ -0,0 +1,129 @@ +package jsoniter + +import "fmt" + +// ReadNil reads a json object as nil and +// returns whether it's a nil or not +func (iter *Iterator) ReadNil() (ret bool) { + c := iter.nextToken() + if c == 'n' { + iter.skipThreeBytes('u', 'l', 'l') // null + return true + } + iter.unreadByte() + return false +} + +// ReadBool reads a 
json object as BoolValue +func (iter *Iterator) ReadBool() (ret bool) { + c := iter.nextToken() + if c == 't' { + iter.skipThreeBytes('r', 'u', 'e') + return true + } + if c == 'f' { + iter.skipFourBytes('a', 'l', 's', 'e') + return false + } + iter.ReportError("ReadBool", "expect t or f, but found "+string([]byte{c})) + return +} + +// SkipAndReturnBytes skip next JSON element, and return its content as []byte. +// The []byte can be kept, it is a copy of data. +func (iter *Iterator) SkipAndReturnBytes() []byte { + iter.startCapture(iter.head) + iter.Skip() + return iter.stopCapture() +} + +type captureBuffer struct { + startedAt int + captured []byte +} + +func (iter *Iterator) startCapture(captureStartedAt int) { + if iter.captured != nil { + panic("already in capture mode") + } + iter.captureStartedAt = captureStartedAt + iter.captured = make([]byte, 0, 32) +} + +func (iter *Iterator) stopCapture() []byte { + if iter.captured == nil { + panic("not in capture mode") + } + captured := iter.captured + remaining := iter.buf[iter.captureStartedAt:iter.head] + iter.captureStartedAt = -1 + iter.captured = nil + if len(captured) == 0 { + copied := make([]byte, len(remaining)) + copy(copied, remaining) + return copied + } + captured = append(captured, remaining...) + return captured +} + +// Skip skips a json object and positions to relatively the next json object +func (iter *Iterator) Skip() { + c := iter.nextToken() + switch c { + case '"': + iter.skipString() + case 'n': + iter.skipThreeBytes('u', 'l', 'l') // null + case 't': + iter.skipThreeBytes('r', 'u', 'e') // true + case 'f': + iter.skipFourBytes('a', 'l', 's', 'e') // false + case '0': + iter.unreadByte() + iter.ReadFloat32() + case '-', '1', '2', '3', '4', '5', '6', '7', '8', '9': + iter.skipNumber() + case '[': + iter.skipArray() + case '{': + iter.skipObject() + default: + iter.ReportError("Skip", fmt.Sprintf("do not know how to skip: %v", c)) + return + } +} + +func (iter *Iterator) skipFourBytes(b1, b2, b3, b4 byte) { + if iter.readByte() != b1 { + iter.ReportError("skipFourBytes", fmt.Sprintf("expect %s", string([]byte{b1, b2, b3, b4}))) + return + } + if iter.readByte() != b2 { + iter.ReportError("skipFourBytes", fmt.Sprintf("expect %s", string([]byte{b1, b2, b3, b4}))) + return + } + if iter.readByte() != b3 { + iter.ReportError("skipFourBytes", fmt.Sprintf("expect %s", string([]byte{b1, b2, b3, b4}))) + return + } + if iter.readByte() != b4 { + iter.ReportError("skipFourBytes", fmt.Sprintf("expect %s", string([]byte{b1, b2, b3, b4}))) + return + } +} + +func (iter *Iterator) skipThreeBytes(b1, b2, b3 byte) { + if iter.readByte() != b1 { + iter.ReportError("skipThreeBytes", fmt.Sprintf("expect %s", string([]byte{b1, b2, b3}))) + return + } + if iter.readByte() != b2 { + iter.ReportError("skipThreeBytes", fmt.Sprintf("expect %s", string([]byte{b1, b2, b3}))) + return + } + if iter.readByte() != b3 { + iter.ReportError("skipThreeBytes", fmt.Sprintf("expect %s", string([]byte{b1, b2, b3}))) + return + } +} diff --git a/vendor/github.com/json-iterator/go/iter_skip_sloppy.go b/vendor/github.com/json-iterator/go/iter_skip_sloppy.go new file mode 100644 index 0000000000..8fcdc3b69b --- /dev/null +++ b/vendor/github.com/json-iterator/go/iter_skip_sloppy.go @@ -0,0 +1,144 @@ +//+build jsoniter_sloppy + +package jsoniter + +// sloppy but faster implementation, do not validate the input json + +func (iter *Iterator) skipNumber() { + for { + for i := iter.head; i < iter.tail; i++ { + c := iter.buf[i] + switch c { + case ' ', '\n', '\r', 
'\t', ',', '}', ']': + iter.head = i + return + } + } + if !iter.loadMore() { + return + } + } +} + +func (iter *Iterator) skipArray() { + level := 1 + for { + for i := iter.head; i < iter.tail; i++ { + switch iter.buf[i] { + case '"': // If inside string, skip it + iter.head = i + 1 + iter.skipString() + i = iter.head - 1 // it will be i++ soon + case '[': // If open symbol, increase level + level++ + case ']': // If close symbol, increase level + level-- + + // If we have returned to the original level, we're done + if level == 0 { + iter.head = i + 1 + return + } + } + } + if !iter.loadMore() { + iter.ReportError("skipObject", "incomplete array") + return + } + } +} + +func (iter *Iterator) skipObject() { + level := 1 + for { + for i := iter.head; i < iter.tail; i++ { + switch iter.buf[i] { + case '"': // If inside string, skip it + iter.head = i + 1 + iter.skipString() + i = iter.head - 1 // it will be i++ soon + case '{': // If open symbol, increase level + level++ + case '}': // If close symbol, increase level + level-- + + // If we have returned to the original level, we're done + if level == 0 { + iter.head = i + 1 + return + } + } + } + if !iter.loadMore() { + iter.ReportError("skipObject", "incomplete object") + return + } + } +} + +func (iter *Iterator) skipString() { + for { + end, escaped := iter.findStringEnd() + if end == -1 { + if !iter.loadMore() { + iter.ReportError("skipString", "incomplete string") + return + } + if escaped { + iter.head = 1 // skip the first char as last char read is \ + } + } else { + iter.head = end + return + } + } +} + +// adapted from: https://github.com/buger/jsonparser/blob/master/parser.go +// Tries to find the end of string +// Support if string contains escaped quote symbols. +func (iter *Iterator) findStringEnd() (int, bool) { + escaped := false + for i := iter.head; i < iter.tail; i++ { + c := iter.buf[i] + if c == '"' { + if !escaped { + return i + 1, false + } + j := i - 1 + for { + if j < iter.head || iter.buf[j] != '\\' { + // even number of backslashes + // either end of buffer, or " found + return i + 1, true + } + j-- + if j < iter.head || iter.buf[j] != '\\' { + // odd number of backslashes + // it is \" or \\\" + break + } + j-- + } + } else if c == '\\' { + escaped = true + } + } + j := iter.tail - 1 + for { + if j < iter.head || iter.buf[j] != '\\' { + // even number of backslashes + // either end of buffer, or " found + return -1, false // do not end with \ + } + j-- + if j < iter.head || iter.buf[j] != '\\' { + // odd number of backslashes + // it is \" or \\\" + break + } + j-- + + } + return -1, true // end with \ +} diff --git a/vendor/github.com/json-iterator/go/iter_skip_strict.go b/vendor/github.com/json-iterator/go/iter_skip_strict.go new file mode 100644 index 0000000000..f67bc2e831 --- /dev/null +++ b/vendor/github.com/json-iterator/go/iter_skip_strict.go @@ -0,0 +1,89 @@ +//+build !jsoniter_sloppy + +package jsoniter + +import "fmt" + +func (iter *Iterator) skipNumber() { + if !iter.trySkipNumber() { + iter.unreadByte() + iter.ReadFloat32() + } +} + +func (iter *Iterator) trySkipNumber() bool { + dotFound := false + for i := iter.head; i < iter.tail; i++ { + c := iter.buf[i] + switch c { + case '0', '1', '2', '3', '4', '5', '6', '7', '8', '9': + case '.': + if dotFound { + iter.ReportError("validateNumber", `more than one dot found in number`) + return true // already failed + } + if i+1 == iter.tail { + return false + } + c = iter.buf[i+1] + switch c { + case '0', '1', '2', '3', '4', '5', '6', '7', '8', '9': + 
default: + iter.ReportError("validateNumber", `missing digit after dot`) + return true // already failed + } + dotFound = true + default: + switch c { + case ',', ']', '}', ' ', '\t', '\n', '\r': + if iter.head == i { + return false // if - without following digits + } + iter.head = i + return true // must be valid + } + return false // may be invalid + } + } + return false +} + +func (iter *Iterator) skipString() { + if !iter.trySkipString() { + iter.unreadByte() + iter.ReadString() + } +} + +func (iter *Iterator) trySkipString() bool { + for i := iter.head; i < iter.tail; i++ { + c := iter.buf[i] + if c == '"' { + iter.head = i + 1 + return true // valid + } else if c == '\\' { + return false + } else if c < ' ' { + iter.ReportError("trySkipString", + fmt.Sprintf(`invalid control character found: %d`, c)) + return true // already failed + } + } + return false +} + +func (iter *Iterator) skipObject() { + iter.unreadByte() + iter.ReadObjectCB(func(iter *Iterator, field string) bool { + iter.Skip() + return true + }) +} + +func (iter *Iterator) skipArray() { + iter.unreadByte() + iter.ReadArrayCB(func(iter *Iterator) bool { + iter.Skip() + return true + }) +} diff --git a/vendor/github.com/json-iterator/go/iter_str.go b/vendor/github.com/json-iterator/go/iter_str.go new file mode 100644 index 0000000000..adc487ea80 --- /dev/null +++ b/vendor/github.com/json-iterator/go/iter_str.go @@ -0,0 +1,215 @@ +package jsoniter + +import ( + "fmt" + "unicode/utf16" +) + +// ReadString read string from iterator +func (iter *Iterator) ReadString() (ret string) { + c := iter.nextToken() + if c == '"' { + for i := iter.head; i < iter.tail; i++ { + c := iter.buf[i] + if c == '"' { + ret = string(iter.buf[iter.head:i]) + iter.head = i + 1 + return ret + } else if c == '\\' { + break + } else if c < ' ' { + iter.ReportError("ReadString", + fmt.Sprintf(`invalid control character found: %d`, c)) + return + } + } + return iter.readStringSlowPath() + } else if c == 'n' { + iter.skipThreeBytes('u', 'l', 'l') + return "" + } + iter.ReportError("ReadString", `expects " or n, but found `+string([]byte{c})) + return +} + +func (iter *Iterator) readStringSlowPath() (ret string) { + var str []byte + var c byte + for iter.Error == nil { + c = iter.readByte() + if c == '"' { + return string(str) + } + if c == '\\' { + c = iter.readByte() + str = iter.readEscapedChar(c, str) + } else { + str = append(str, c) + } + } + iter.ReportError("readStringSlowPath", "unexpected end of input") + return +} + +func (iter *Iterator) readEscapedChar(c byte, str []byte) []byte { + switch c { + case 'u': + r := iter.readU4() + if utf16.IsSurrogate(r) { + c = iter.readByte() + if iter.Error != nil { + return nil + } + if c != '\\' { + iter.unreadByte() + str = appendRune(str, r) + return str + } + c = iter.readByte() + if iter.Error != nil { + return nil + } + if c != 'u' { + str = appendRune(str, r) + return iter.readEscapedChar(c, str) + } + r2 := iter.readU4() + if iter.Error != nil { + return nil + } + combined := utf16.DecodeRune(r, r2) + if combined == '\uFFFD' { + str = appendRune(str, r) + str = appendRune(str, r2) + } else { + str = appendRune(str, combined) + } + } else { + str = appendRune(str, r) + } + case '"': + str = append(str, '"') + case '\\': + str = append(str, '\\') + case '/': + str = append(str, '/') + case 'b': + str = append(str, '\b') + case 'f': + str = append(str, '\f') + case 'n': + str = append(str, '\n') + case 'r': + str = append(str, '\r') + case 't': + str = append(str, '\t') + default: + 
iter.ReportError("readEscapedChar", + `invalid escape char after \`) + return nil + } + return str +} + +// ReadStringAsSlice read string from iterator without copying into string form. +// The []byte can not be kept, as it will change after next iterator call. +func (iter *Iterator) ReadStringAsSlice() (ret []byte) { + c := iter.nextToken() + if c == '"' { + for i := iter.head; i < iter.tail; i++ { + // require ascii string and no escape + // for: field name, base64, number + if iter.buf[i] == '"' { + // fast path: reuse the underlying buffer + ret = iter.buf[iter.head:i] + iter.head = i + 1 + return ret + } + } + readLen := iter.tail - iter.head + copied := make([]byte, readLen, readLen*2) + copy(copied, iter.buf[iter.head:iter.tail]) + iter.head = iter.tail + for iter.Error == nil { + c := iter.readByte() + if c == '"' { + return copied + } + copied = append(copied, c) + } + return copied + } + iter.ReportError("ReadStringAsSlice", `expects " or n, but found `+string([]byte{c})) + return +} + +func (iter *Iterator) readU4() (ret rune) { + for i := 0; i < 4; i++ { + c := iter.readByte() + if iter.Error != nil { + return + } + if c >= '0' && c <= '9' { + ret = ret*16 + rune(c-'0') + } else if c >= 'a' && c <= 'f' { + ret = ret*16 + rune(c-'a'+10) + } else if c >= 'A' && c <= 'F' { + ret = ret*16 + rune(c-'A'+10) + } else { + iter.ReportError("readU4", "expects 0~9 or a~f, but found "+string([]byte{c})) + return + } + } + return ret +} + +const ( + t1 = 0x00 // 0000 0000 + tx = 0x80 // 1000 0000 + t2 = 0xC0 // 1100 0000 + t3 = 0xE0 // 1110 0000 + t4 = 0xF0 // 1111 0000 + t5 = 0xF8 // 1111 1000 + + maskx = 0x3F // 0011 1111 + mask2 = 0x1F // 0001 1111 + mask3 = 0x0F // 0000 1111 + mask4 = 0x07 // 0000 0111 + + rune1Max = 1<<7 - 1 + rune2Max = 1<<11 - 1 + rune3Max = 1<<16 - 1 + + surrogateMin = 0xD800 + surrogateMax = 0xDFFF + + maxRune = '\U0010FFFF' // Maximum valid Unicode code point. + runeError = '\uFFFD' // the "error" Rune or "Unicode replacement character" +) + +func appendRune(p []byte, r rune) []byte { + // Negative values are erroneous. Making it unsigned addresses the problem. + switch i := uint32(r); { + case i <= rune1Max: + p = append(p, byte(r)) + return p + case i <= rune2Max: + p = append(p, t2|byte(r>>6)) + p = append(p, tx|byte(r)&maskx) + return p + case i > maxRune, surrogateMin <= i && i <= surrogateMax: + r = runeError + fallthrough + case i <= rune3Max: + p = append(p, t3|byte(r>>12)) + p = append(p, tx|byte(r>>6)&maskx) + p = append(p, tx|byte(r)&maskx) + return p + default: + p = append(p, t4|byte(r>>18)) + p = append(p, tx|byte(r>>12)&maskx) + p = append(p, tx|byte(r>>6)&maskx) + p = append(p, tx|byte(r)&maskx) + return p + } +} diff --git a/vendor/github.com/json-iterator/go/jsoniter.go b/vendor/github.com/json-iterator/go/jsoniter.go new file mode 100644 index 0000000000..c2934f916e --- /dev/null +++ b/vendor/github.com/json-iterator/go/jsoniter.go @@ -0,0 +1,18 @@ +// Package jsoniter implements encoding and decoding of JSON as defined in +// RFC 4627 and provides interfaces with identical syntax of standard lib encoding/json. +// Converting from encoding/json to jsoniter is no more than replacing the package with jsoniter +// and variable type declarations (if any). +// jsoniter interfaces gives 100% compatibility with code using standard lib. 
+// +// "JSON and Go" +// (https://golang.org/doc/articles/json_and_go.html) +// gives a description of how Marshal/Unmarshal operate +// between arbitrary or predefined json objects and bytes, +// and it applies to jsoniter.Marshal/Unmarshal as well. +// +// Besides, jsoniter.Iterator provides a different set of interfaces +// iterating given bytes/string/reader +// and yielding parsed elements one by one. +// This set of interfaces reads input as required and gives +// better performance. +package jsoniter diff --git a/vendor/github.com/json-iterator/go/pool.go b/vendor/github.com/json-iterator/go/pool.go new file mode 100644 index 0000000000..e2389b56cf --- /dev/null +++ b/vendor/github.com/json-iterator/go/pool.go @@ -0,0 +1,42 @@ +package jsoniter + +import ( + "io" +) + +// IteratorPool a thread safe pool of iterators with same configuration +type IteratorPool interface { + BorrowIterator(data []byte) *Iterator + ReturnIterator(iter *Iterator) +} + +// StreamPool a thread safe pool of streams with same configuration +type StreamPool interface { + BorrowStream(writer io.Writer) *Stream + ReturnStream(stream *Stream) +} + +func (cfg *frozenConfig) BorrowStream(writer io.Writer) *Stream { + stream := cfg.streamPool.Get().(*Stream) + stream.Reset(writer) + return stream +} + +func (cfg *frozenConfig) ReturnStream(stream *Stream) { + stream.out = nil + stream.Error = nil + stream.Attachment = nil + cfg.streamPool.Put(stream) +} + +func (cfg *frozenConfig) BorrowIterator(data []byte) *Iterator { + iter := cfg.iteratorPool.Get().(*Iterator) + iter.ResetBytes(data) + return iter +} + +func (cfg *frozenConfig) ReturnIterator(iter *Iterator) { + iter.Error = nil + iter.Attachment = nil + cfg.iteratorPool.Put(iter) +} diff --git a/vendor/github.com/json-iterator/go/reflect.go b/vendor/github.com/json-iterator/go/reflect.go new file mode 100644 index 0000000000..be7a0e2188 --- /dev/null +++ b/vendor/github.com/json-iterator/go/reflect.go @@ -0,0 +1,330 @@ +package jsoniter + +import ( + "fmt" + "reflect" + "unsafe" + + "github.com/modern-go/reflect2" +) + +// ValDecoder is an internal type registered to cache as needed. +// Don't confuse jsoniter.ValDecoder with json.Decoder. +// For json.Decoder's adapter, refer to jsoniter.AdapterDecoder(todo link). +// +// Reflection on type to create decoders, which is then cached +// Reflection on value is avoided as we can, as the reflect.Value itself will allocate, with following exceptions +// 1. create instance of new value, for example *int will need a int to be allocated +// 2. append to slice, if the existing cap is not enough, allocate will be done using Reflect.New +// 3. assignment to map, both key and value will be reflect.Value +// For a simple struct binding, it will be reflect.Value free and allocation free +type ValDecoder interface { + Decode(ptr unsafe.Pointer, iter *Iterator) +} + +// ValEncoder is an internal type registered to cache as needed. +// Don't confuse jsoniter.ValEncoder with json.Encoder. +// For json.Encoder's adapter, refer to jsoniter.AdapterEncoder(todo godoc link). 
+type ValEncoder interface { + IsEmpty(ptr unsafe.Pointer) bool + Encode(ptr unsafe.Pointer, stream *Stream) +} + +type checkIsEmpty interface { + IsEmpty(ptr unsafe.Pointer) bool +} + +type ctx struct { + *frozenConfig + prefix string + encoders map[reflect2.Type]ValEncoder + decoders map[reflect2.Type]ValDecoder +} + +func (b *ctx) caseSensitive() bool { + if b.frozenConfig == nil { + // default is case-insensitive + return false + } + return b.frozenConfig.caseSensitive +} + +func (b *ctx) append(prefix string) *ctx { + return &ctx{ + frozenConfig: b.frozenConfig, + prefix: b.prefix + " " + prefix, + encoders: b.encoders, + decoders: b.decoders, + } +} + +// ReadVal copy the underlying JSON into go interface, same as json.Unmarshal +func (iter *Iterator) ReadVal(obj interface{}) { + cacheKey := reflect2.RTypeOf(obj) + decoder := iter.cfg.getDecoderFromCache(cacheKey) + if decoder == nil { + typ := reflect2.TypeOf(obj) + if typ.Kind() != reflect.Ptr { + iter.ReportError("ReadVal", "can only unmarshal into pointer") + return + } + decoder = iter.cfg.DecoderOf(typ) + } + ptr := reflect2.PtrOf(obj) + if ptr == nil { + iter.ReportError("ReadVal", "can not read into nil pointer") + return + } + decoder.Decode(ptr, iter) +} + +// WriteVal copy the go interface into underlying JSON, same as json.Marshal +func (stream *Stream) WriteVal(val interface{}) { + if nil == val { + stream.WriteNil() + return + } + cacheKey := reflect2.RTypeOf(val) + encoder := stream.cfg.getEncoderFromCache(cacheKey) + if encoder == nil { + typ := reflect2.TypeOf(val) + encoder = stream.cfg.EncoderOf(typ) + } + encoder.Encode(reflect2.PtrOf(val), stream) +} + +func (cfg *frozenConfig) DecoderOf(typ reflect2.Type) ValDecoder { + cacheKey := typ.RType() + decoder := cfg.getDecoderFromCache(cacheKey) + if decoder != nil { + return decoder + } + ctx := &ctx{ + frozenConfig: cfg, + prefix: "", + decoders: map[reflect2.Type]ValDecoder{}, + encoders: map[reflect2.Type]ValEncoder{}, + } + ptrType := typ.(*reflect2.UnsafePtrType) + decoder = decoderOfType(ctx, ptrType.Elem()) + cfg.addDecoderToCache(cacheKey, decoder) + return decoder +} + +func decoderOfType(ctx *ctx, typ reflect2.Type) ValDecoder { + decoder := getTypeDecoderFromExtension(ctx, typ) + if decoder != nil { + return decoder + } + decoder = createDecoderOfType(ctx, typ) + for _, extension := range extensions { + decoder = extension.DecorateDecoder(typ, decoder) + } + for _, extension := range ctx.extensions { + decoder = extension.DecorateDecoder(typ, decoder) + } + return decoder +} + +func createDecoderOfType(ctx *ctx, typ reflect2.Type) ValDecoder { + decoder := ctx.decoders[typ] + if decoder != nil { + return decoder + } + placeholder := &placeholderDecoder{} + ctx.decoders[typ] = placeholder + decoder = _createDecoderOfType(ctx, typ) + placeholder.decoder = decoder + return decoder +} + +func _createDecoderOfType(ctx *ctx, typ reflect2.Type) ValDecoder { + decoder := createDecoderOfJsonRawMessage(ctx, typ) + if decoder != nil { + return decoder + } + decoder = createDecoderOfJsonNumber(ctx, typ) + if decoder != nil { + return decoder + } + decoder = createDecoderOfMarshaler(ctx, typ) + if decoder != nil { + return decoder + } + decoder = createDecoderOfAny(ctx, typ) + if decoder != nil { + return decoder + } + decoder = createDecoderOfNative(ctx, typ) + if decoder != nil { + return decoder + } + switch typ.Kind() { + case reflect.Interface: + ifaceType, isIFace := typ.(*reflect2.UnsafeIFaceType) + if isIFace { + return &ifaceDecoder{valType: ifaceType} + } + 
return &efaceDecoder{} + case reflect.Struct: + return decoderOfStruct(ctx, typ) + case reflect.Array: + return decoderOfArray(ctx, typ) + case reflect.Slice: + return decoderOfSlice(ctx, typ) + case reflect.Map: + return decoderOfMap(ctx, typ) + case reflect.Ptr: + return decoderOfOptional(ctx, typ) + default: + return &lazyErrorDecoder{err: fmt.Errorf("%s%s is unsupported type", ctx.prefix, typ.String())} + } +} + +func (cfg *frozenConfig) EncoderOf(typ reflect2.Type) ValEncoder { + cacheKey := typ.RType() + encoder := cfg.getEncoderFromCache(cacheKey) + if encoder != nil { + return encoder + } + ctx := &ctx{ + frozenConfig: cfg, + prefix: "", + decoders: map[reflect2.Type]ValDecoder{}, + encoders: map[reflect2.Type]ValEncoder{}, + } + encoder = encoderOfType(ctx, typ) + if typ.LikePtr() { + encoder = &onePtrEncoder{encoder} + } + cfg.addEncoderToCache(cacheKey, encoder) + return encoder +} + +type onePtrEncoder struct { + encoder ValEncoder +} + +func (encoder *onePtrEncoder) IsEmpty(ptr unsafe.Pointer) bool { + return encoder.encoder.IsEmpty(unsafe.Pointer(&ptr)) +} + +func (encoder *onePtrEncoder) Encode(ptr unsafe.Pointer, stream *Stream) { + encoder.encoder.Encode(unsafe.Pointer(&ptr), stream) +} + +func encoderOfType(ctx *ctx, typ reflect2.Type) ValEncoder { + encoder := getTypeEncoderFromExtension(ctx, typ) + if encoder != nil { + return encoder + } + encoder = createEncoderOfType(ctx, typ) + for _, extension := range extensions { + encoder = extension.DecorateEncoder(typ, encoder) + } + for _, extension := range ctx.extensions { + encoder = extension.DecorateEncoder(typ, encoder) + } + return encoder +} + +func createEncoderOfType(ctx *ctx, typ reflect2.Type) ValEncoder { + encoder := ctx.encoders[typ] + if encoder != nil { + return encoder + } + placeholder := &placeholderEncoder{} + ctx.encoders[typ] = placeholder + encoder = _createEncoderOfType(ctx, typ) + placeholder.encoder = encoder + return encoder +} +func _createEncoderOfType(ctx *ctx, typ reflect2.Type) ValEncoder { + encoder := createEncoderOfJsonRawMessage(ctx, typ) + if encoder != nil { + return encoder + } + encoder = createEncoderOfJsonNumber(ctx, typ) + if encoder != nil { + return encoder + } + encoder = createEncoderOfMarshaler(ctx, typ) + if encoder != nil { + return encoder + } + encoder = createEncoderOfAny(ctx, typ) + if encoder != nil { + return encoder + } + encoder = createEncoderOfNative(ctx, typ) + if encoder != nil { + return encoder + } + kind := typ.Kind() + switch kind { + case reflect.Interface: + return &dynamicEncoder{typ} + case reflect.Struct: + return encoderOfStruct(ctx, typ) + case reflect.Array: + return encoderOfArray(ctx, typ) + case reflect.Slice: + return encoderOfSlice(ctx, typ) + case reflect.Map: + return encoderOfMap(ctx, typ) + case reflect.Ptr: + return encoderOfOptional(ctx, typ) + default: + return &lazyErrorEncoder{err: fmt.Errorf("%s%s is unsupported type", ctx.prefix, typ.String())} + } +} + +type lazyErrorDecoder struct { + err error +} + +func (decoder *lazyErrorDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) { + if iter.WhatIsNext() != NilValue { + if iter.Error == nil { + iter.Error = decoder.err + } + } else { + iter.Skip() + } +} + +type lazyErrorEncoder struct { + err error +} + +func (encoder *lazyErrorEncoder) Encode(ptr unsafe.Pointer, stream *Stream) { + if ptr == nil { + stream.WriteNil() + } else if stream.Error == nil { + stream.Error = encoder.err + } +} + +func (encoder *lazyErrorEncoder) IsEmpty(ptr unsafe.Pointer) bool { + return false +} + +type 
placeholderDecoder struct { + decoder ValDecoder +} + +func (decoder *placeholderDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) { + decoder.decoder.Decode(ptr, iter) +} + +type placeholderEncoder struct { + encoder ValEncoder +} + +func (encoder *placeholderEncoder) Encode(ptr unsafe.Pointer, stream *Stream) { + encoder.encoder.Encode(ptr, stream) +} + +func (encoder *placeholderEncoder) IsEmpty(ptr unsafe.Pointer) bool { + return encoder.encoder.IsEmpty(ptr) +} diff --git a/vendor/github.com/json-iterator/go/reflect_array.go b/vendor/github.com/json-iterator/go/reflect_array.go new file mode 100644 index 0000000000..13a0b7b087 --- /dev/null +++ b/vendor/github.com/json-iterator/go/reflect_array.go @@ -0,0 +1,104 @@ +package jsoniter + +import ( + "fmt" + "github.com/modern-go/reflect2" + "io" + "unsafe" +) + +func decoderOfArray(ctx *ctx, typ reflect2.Type) ValDecoder { + arrayType := typ.(*reflect2.UnsafeArrayType) + decoder := decoderOfType(ctx.append("[arrayElem]"), arrayType.Elem()) + return &arrayDecoder{arrayType, decoder} +} + +func encoderOfArray(ctx *ctx, typ reflect2.Type) ValEncoder { + arrayType := typ.(*reflect2.UnsafeArrayType) + if arrayType.Len() == 0 { + return emptyArrayEncoder{} + } + encoder := encoderOfType(ctx.append("[arrayElem]"), arrayType.Elem()) + return &arrayEncoder{arrayType, encoder} +} + +type emptyArrayEncoder struct{} + +func (encoder emptyArrayEncoder) Encode(ptr unsafe.Pointer, stream *Stream) { + stream.WriteEmptyArray() +} + +func (encoder emptyArrayEncoder) IsEmpty(ptr unsafe.Pointer) bool { + return true +} + +type arrayEncoder struct { + arrayType *reflect2.UnsafeArrayType + elemEncoder ValEncoder +} + +func (encoder *arrayEncoder) Encode(ptr unsafe.Pointer, stream *Stream) { + stream.WriteArrayStart() + elemPtr := unsafe.Pointer(ptr) + encoder.elemEncoder.Encode(elemPtr, stream) + for i := 1; i < encoder.arrayType.Len(); i++ { + stream.WriteMore() + elemPtr = encoder.arrayType.UnsafeGetIndex(ptr, i) + encoder.elemEncoder.Encode(elemPtr, stream) + } + stream.WriteArrayEnd() + if stream.Error != nil && stream.Error != io.EOF { + stream.Error = fmt.Errorf("%v: %s", encoder.arrayType, stream.Error.Error()) + } +} + +func (encoder *arrayEncoder) IsEmpty(ptr unsafe.Pointer) bool { + return false +} + +type arrayDecoder struct { + arrayType *reflect2.UnsafeArrayType + elemDecoder ValDecoder +} + +func (decoder *arrayDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) { + decoder.doDecode(ptr, iter) + if iter.Error != nil && iter.Error != io.EOF { + iter.Error = fmt.Errorf("%v: %s", decoder.arrayType, iter.Error.Error()) + } +} + +func (decoder *arrayDecoder) doDecode(ptr unsafe.Pointer, iter *Iterator) { + c := iter.nextToken() + arrayType := decoder.arrayType + if c == 'n' { + iter.skipThreeBytes('u', 'l', 'l') + return + } + if c != '[' { + iter.ReportError("decode array", "expect [ or n, but found "+string([]byte{c})) + return + } + c = iter.nextToken() + if c == ']' { + return + } + iter.unreadByte() + elemPtr := arrayType.UnsafeGetIndex(ptr, 0) + decoder.elemDecoder.Decode(elemPtr, iter) + length := 1 + for c = iter.nextToken(); c == ','; c = iter.nextToken() { + if length >= arrayType.Len() { + iter.Skip() + continue + } + idx := length + length += 1 + elemPtr = arrayType.UnsafeGetIndex(ptr, idx) + decoder.elemDecoder.Decode(elemPtr, iter) + } + if c != ']' { + iter.ReportError("decode array", "expect ], but found "+string([]byte{c})) + return + } +} diff --git a/vendor/github.com/json-iterator/go/reflect_dynamic.go 
b/vendor/github.com/json-iterator/go/reflect_dynamic.go new file mode 100644 index 0000000000..8b6bc8b433 --- /dev/null +++ b/vendor/github.com/json-iterator/go/reflect_dynamic.go @@ -0,0 +1,70 @@ +package jsoniter + +import ( + "github.com/modern-go/reflect2" + "reflect" + "unsafe" +) + +type dynamicEncoder struct { + valType reflect2.Type +} + +func (encoder *dynamicEncoder) Encode(ptr unsafe.Pointer, stream *Stream) { + obj := encoder.valType.UnsafeIndirect(ptr) + stream.WriteVal(obj) +} + +func (encoder *dynamicEncoder) IsEmpty(ptr unsafe.Pointer) bool { + return encoder.valType.UnsafeIndirect(ptr) == nil +} + +type efaceDecoder struct { +} + +func (decoder *efaceDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) { + pObj := (*interface{})(ptr) + obj := *pObj + if obj == nil { + *pObj = iter.Read() + return + } + typ := reflect2.TypeOf(obj) + if typ.Kind() != reflect.Ptr { + *pObj = iter.Read() + return + } + ptrType := typ.(*reflect2.UnsafePtrType) + ptrElemType := ptrType.Elem() + if iter.WhatIsNext() == NilValue { + if ptrElemType.Kind() != reflect.Ptr { + iter.skipFourBytes('n', 'u', 'l', 'l') + *pObj = nil + return + } + } + if reflect2.IsNil(obj) { + obj := ptrElemType.New() + iter.ReadVal(obj) + *pObj = obj + return + } + iter.ReadVal(obj) +} + +type ifaceDecoder struct { + valType *reflect2.UnsafeIFaceType +} + +func (decoder *ifaceDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) { + if iter.ReadNil() { + decoder.valType.UnsafeSet(ptr, decoder.valType.UnsafeNew()) + return + } + obj := decoder.valType.UnsafeIndirect(ptr) + if reflect2.IsNil(obj) { + iter.ReportError("decode non empty interface", "can not unmarshal into nil") + return + } + iter.ReadVal(obj) +} diff --git a/vendor/github.com/json-iterator/go/reflect_extension.go b/vendor/github.com/json-iterator/go/reflect_extension.go new file mode 100644 index 0000000000..917bbe84ec --- /dev/null +++ b/vendor/github.com/json-iterator/go/reflect_extension.go @@ -0,0 +1,471 @@ +package jsoniter + +import ( + "fmt" + "github.com/modern-go/reflect2" + "reflect" + "sort" + "strings" + "unicode" + "unsafe" +) + +var typeDecoders = map[string]ValDecoder{} +var fieldDecoders = map[string]ValDecoder{} +var typeEncoders = map[string]ValEncoder{} +var fieldEncoders = map[string]ValEncoder{} +var extensions = []Extension{} + +// StructDescriptor describe how should we encode/decode the struct +type StructDescriptor struct { + Type reflect2.Type + Fields []*Binding +} + +// GetField get one field from the descriptor by its name. +// Can not use map here to keep field orders. +func (structDescriptor *StructDescriptor) GetField(fieldName string) *Binding { + for _, binding := range structDescriptor.Fields { + if binding.Field.Name() == fieldName { + return binding + } + } + return nil +} + +// Binding describe how should we encode/decode the struct field +type Binding struct { + levels []int + Field reflect2.StructField + FromNames []string + ToNames []string + Encoder ValEncoder + Decoder ValDecoder +} + +// Extension the one for all SPI. Customize encoding/decoding by specifying alternate encoder/decoder. +// Can also rename fields by UpdateStructDescriptor. 
+type Extension interface { + UpdateStructDescriptor(structDescriptor *StructDescriptor) + CreateMapKeyDecoder(typ reflect2.Type) ValDecoder + CreateMapKeyEncoder(typ reflect2.Type) ValEncoder + CreateDecoder(typ reflect2.Type) ValDecoder + CreateEncoder(typ reflect2.Type) ValEncoder + DecorateDecoder(typ reflect2.Type, decoder ValDecoder) ValDecoder + DecorateEncoder(typ reflect2.Type, encoder ValEncoder) ValEncoder +} + +// DummyExtension embed this type get dummy implementation for all methods of Extension +type DummyExtension struct { +} + +// UpdateStructDescriptor No-op +func (extension *DummyExtension) UpdateStructDescriptor(structDescriptor *StructDescriptor) { +} + +// CreateMapKeyDecoder No-op +func (extension *DummyExtension) CreateMapKeyDecoder(typ reflect2.Type) ValDecoder { + return nil +} + +// CreateMapKeyEncoder No-op +func (extension *DummyExtension) CreateMapKeyEncoder(typ reflect2.Type) ValEncoder { + return nil +} + +// CreateDecoder No-op +func (extension *DummyExtension) CreateDecoder(typ reflect2.Type) ValDecoder { + return nil +} + +// CreateEncoder No-op +func (extension *DummyExtension) CreateEncoder(typ reflect2.Type) ValEncoder { + return nil +} + +// DecorateDecoder No-op +func (extension *DummyExtension) DecorateDecoder(typ reflect2.Type, decoder ValDecoder) ValDecoder { + return decoder +} + +// DecorateEncoder No-op +func (extension *DummyExtension) DecorateEncoder(typ reflect2.Type, encoder ValEncoder) ValEncoder { + return encoder +} + +type EncoderExtension map[reflect2.Type]ValEncoder + +// UpdateStructDescriptor No-op +func (extension EncoderExtension) UpdateStructDescriptor(structDescriptor *StructDescriptor) { +} + +// CreateDecoder No-op +func (extension EncoderExtension) CreateDecoder(typ reflect2.Type) ValDecoder { + return nil +} + +// CreateEncoder get encoder from map +func (extension EncoderExtension) CreateEncoder(typ reflect2.Type) ValEncoder { + return extension[typ] +} + +// CreateMapKeyDecoder No-op +func (extension EncoderExtension) CreateMapKeyDecoder(typ reflect2.Type) ValDecoder { + return nil +} + +// CreateMapKeyEncoder No-op +func (extension EncoderExtension) CreateMapKeyEncoder(typ reflect2.Type) ValEncoder { + return nil +} + +// DecorateDecoder No-op +func (extension EncoderExtension) DecorateDecoder(typ reflect2.Type, decoder ValDecoder) ValDecoder { + return decoder +} + +// DecorateEncoder No-op +func (extension EncoderExtension) DecorateEncoder(typ reflect2.Type, encoder ValEncoder) ValEncoder { + return encoder +} + +type DecoderExtension map[reflect2.Type]ValDecoder + +// UpdateStructDescriptor No-op +func (extension DecoderExtension) UpdateStructDescriptor(structDescriptor *StructDescriptor) { +} + +// CreateMapKeyDecoder No-op +func (extension DecoderExtension) CreateMapKeyDecoder(typ reflect2.Type) ValDecoder { + return nil +} + +// CreateMapKeyEncoder No-op +func (extension DecoderExtension) CreateMapKeyEncoder(typ reflect2.Type) ValEncoder { + return nil +} + +// CreateDecoder get decoder from map +func (extension DecoderExtension) CreateDecoder(typ reflect2.Type) ValDecoder { + return extension[typ] +} + +// CreateEncoder No-op +func (extension DecoderExtension) CreateEncoder(typ reflect2.Type) ValEncoder { + return nil +} + +// DecorateDecoder No-op +func (extension DecoderExtension) DecorateDecoder(typ reflect2.Type, decoder ValDecoder) ValDecoder { + return decoder +} + +// DecorateEncoder No-op +func (extension DecoderExtension) DecorateEncoder(typ reflect2.Type, encoder ValEncoder) ValEncoder { + return encoder 
+} + +type funcDecoder struct { + fun DecoderFunc +} + +func (decoder *funcDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) { + decoder.fun(ptr, iter) +} + +type funcEncoder struct { + fun EncoderFunc + isEmptyFunc func(ptr unsafe.Pointer) bool +} + +func (encoder *funcEncoder) Encode(ptr unsafe.Pointer, stream *Stream) { + encoder.fun(ptr, stream) +} + +func (encoder *funcEncoder) IsEmpty(ptr unsafe.Pointer) bool { + if encoder.isEmptyFunc == nil { + return false + } + return encoder.isEmptyFunc(ptr) +} + +// DecoderFunc the function form of TypeDecoder +type DecoderFunc func(ptr unsafe.Pointer, iter *Iterator) + +// EncoderFunc the function form of TypeEncoder +type EncoderFunc func(ptr unsafe.Pointer, stream *Stream) + +// RegisterTypeDecoderFunc register TypeDecoder for a type with function +func RegisterTypeDecoderFunc(typ string, fun DecoderFunc) { + typeDecoders[typ] = &funcDecoder{fun} +} + +// RegisterTypeDecoder register TypeDecoder for a typ +func RegisterTypeDecoder(typ string, decoder ValDecoder) { + typeDecoders[typ] = decoder +} + +// RegisterFieldDecoderFunc register TypeDecoder for a struct field with function +func RegisterFieldDecoderFunc(typ string, field string, fun DecoderFunc) { + RegisterFieldDecoder(typ, field, &funcDecoder{fun}) +} + +// RegisterFieldDecoder register TypeDecoder for a struct field +func RegisterFieldDecoder(typ string, field string, decoder ValDecoder) { + fieldDecoders[fmt.Sprintf("%s/%s", typ, field)] = decoder +} + +// RegisterTypeEncoderFunc register TypeEncoder for a type with encode/isEmpty function +func RegisterTypeEncoderFunc(typ string, fun EncoderFunc, isEmptyFunc func(unsafe.Pointer) bool) { + typeEncoders[typ] = &funcEncoder{fun, isEmptyFunc} +} + +// RegisterTypeEncoder register TypeEncoder for a type +func RegisterTypeEncoder(typ string, encoder ValEncoder) { + typeEncoders[typ] = encoder +} + +// RegisterFieldEncoderFunc register TypeEncoder for a struct field with encode/isEmpty function +func RegisterFieldEncoderFunc(typ string, field string, fun EncoderFunc, isEmptyFunc func(unsafe.Pointer) bool) { + RegisterFieldEncoder(typ, field, &funcEncoder{fun, isEmptyFunc}) +} + +// RegisterFieldEncoder register TypeEncoder for a struct field +func RegisterFieldEncoder(typ string, field string, encoder ValEncoder) { + fieldEncoders[fmt.Sprintf("%s/%s", typ, field)] = encoder +} + +// RegisterExtension register extension +func RegisterExtension(extension Extension) { + extensions = append(extensions, extension) +} + +func getTypeDecoderFromExtension(ctx *ctx, typ reflect2.Type) ValDecoder { + decoder := _getTypeDecoderFromExtension(ctx, typ) + if decoder != nil { + for _, extension := range extensions { + decoder = extension.DecorateDecoder(typ, decoder) + } + for _, extension := range ctx.extensions { + decoder = extension.DecorateDecoder(typ, decoder) + } + } + return decoder +} +func _getTypeDecoderFromExtension(ctx *ctx, typ reflect2.Type) ValDecoder { + for _, extension := range extensions { + decoder := extension.CreateDecoder(typ) + if decoder != nil { + return decoder + } + } + for _, extension := range ctx.extensions { + decoder := extension.CreateDecoder(typ) + if decoder != nil { + return decoder + } + } + typeName := typ.String() + decoder := typeDecoders[typeName] + if decoder != nil { + return decoder + } + if typ.Kind() == reflect.Ptr { + ptrType := typ.(*reflect2.UnsafePtrType) + decoder := typeDecoders[ptrType.Elem().String()] + if decoder != nil { + return &OptionalDecoder{ptrType.Elem(), decoder} + } + } + return nil 
+} + +func getTypeEncoderFromExtension(ctx *ctx, typ reflect2.Type) ValEncoder { + encoder := _getTypeEncoderFromExtension(ctx, typ) + if encoder != nil { + for _, extension := range extensions { + encoder = extension.DecorateEncoder(typ, encoder) + } + for _, extension := range ctx.extensions { + encoder = extension.DecorateEncoder(typ, encoder) + } + } + return encoder +} + +func _getTypeEncoderFromExtension(ctx *ctx, typ reflect2.Type) ValEncoder { + for _, extension := range extensions { + encoder := extension.CreateEncoder(typ) + if encoder != nil { + return encoder + } + } + for _, extension := range ctx.extensions { + encoder := extension.CreateEncoder(typ) + if encoder != nil { + return encoder + } + } + typeName := typ.String() + encoder := typeEncoders[typeName] + if encoder != nil { + return encoder + } + if typ.Kind() == reflect.Ptr { + typePtr := typ.(*reflect2.UnsafePtrType) + encoder := typeEncoders[typePtr.Elem().String()] + if encoder != nil { + return &OptionalEncoder{encoder} + } + } + return nil +} + +func describeStruct(ctx *ctx, typ reflect2.Type) *StructDescriptor { + structType := typ.(*reflect2.UnsafeStructType) + embeddedBindings := []*Binding{} + bindings := []*Binding{} + for i := 0; i < structType.NumField(); i++ { + field := structType.Field(i) + tag, hastag := field.Tag().Lookup(ctx.getTagKey()) + if ctx.onlyTaggedField && !hastag { + continue + } + tagParts := strings.Split(tag, ",") + if tag == "-" { + continue + } + if field.Anonymous() && (tag == "" || tagParts[0] == "") { + if field.Type().Kind() == reflect.Struct { + structDescriptor := describeStruct(ctx, field.Type()) + for _, binding := range structDescriptor.Fields { + binding.levels = append([]int{i}, binding.levels...) + omitempty := binding.Encoder.(*structFieldEncoder).omitempty + binding.Encoder = &structFieldEncoder{field, binding.Encoder, omitempty} + binding.Decoder = &structFieldDecoder{field, binding.Decoder} + embeddedBindings = append(embeddedBindings, binding) + } + continue + } else if field.Type().Kind() == reflect.Ptr { + ptrType := field.Type().(*reflect2.UnsafePtrType) + if ptrType.Elem().Kind() == reflect.Struct { + structDescriptor := describeStruct(ctx, ptrType.Elem()) + for _, binding := range structDescriptor.Fields { + binding.levels = append([]int{i}, binding.levels...) 
+ omitempty := binding.Encoder.(*structFieldEncoder).omitempty + binding.Encoder = &dereferenceEncoder{binding.Encoder} + binding.Encoder = &structFieldEncoder{field, binding.Encoder, omitempty} + binding.Decoder = &dereferenceDecoder{ptrType.Elem(), binding.Decoder} + binding.Decoder = &structFieldDecoder{field, binding.Decoder} + embeddedBindings = append(embeddedBindings, binding) + } + continue + } + } + } + fieldNames := calcFieldNames(field.Name(), tagParts[0], tag) + fieldCacheKey := fmt.Sprintf("%s/%s", typ.String(), field.Name()) + decoder := fieldDecoders[fieldCacheKey] + if decoder == nil { + decoder = decoderOfType(ctx.append(field.Name()), field.Type()) + } + encoder := fieldEncoders[fieldCacheKey] + if encoder == nil { + encoder = encoderOfType(ctx.append(field.Name()), field.Type()) + } + binding := &Binding{ + Field: field, + FromNames: fieldNames, + ToNames: fieldNames, + Decoder: decoder, + Encoder: encoder, + } + binding.levels = []int{i} + bindings = append(bindings, binding) + } + return createStructDescriptor(ctx, typ, bindings, embeddedBindings) +} +func createStructDescriptor(ctx *ctx, typ reflect2.Type, bindings []*Binding, embeddedBindings []*Binding) *StructDescriptor { + structDescriptor := &StructDescriptor{ + Type: typ, + Fields: bindings, + } + for _, extension := range extensions { + extension.UpdateStructDescriptor(structDescriptor) + } + for _, extension := range ctx.extensions { + extension.UpdateStructDescriptor(structDescriptor) + } + processTags(structDescriptor, ctx.frozenConfig) + // merge normal & embedded bindings & sort with original order + allBindings := sortableBindings(append(embeddedBindings, structDescriptor.Fields...)) + sort.Sort(allBindings) + structDescriptor.Fields = allBindings + return structDescriptor +} + +type sortableBindings []*Binding + +func (bindings sortableBindings) Len() int { + return len(bindings) +} + +func (bindings sortableBindings) Less(i, j int) bool { + left := bindings[i].levels + right := bindings[j].levels + k := 0 + for { + if left[k] < right[k] { + return true + } else if left[k] > right[k] { + return false + } + k++ + } +} + +func (bindings sortableBindings) Swap(i, j int) { + bindings[i], bindings[j] = bindings[j], bindings[i] +} + +func processTags(structDescriptor *StructDescriptor, cfg *frozenConfig) { + for _, binding := range structDescriptor.Fields { + shouldOmitEmpty := false + tagParts := strings.Split(binding.Field.Tag().Get(cfg.getTagKey()), ",") + for _, tagPart := range tagParts[1:] { + if tagPart == "omitempty" { + shouldOmitEmpty = true + } else if tagPart == "string" { + if binding.Field.Type().Kind() == reflect.String { + binding.Decoder = &stringModeStringDecoder{binding.Decoder, cfg} + binding.Encoder = &stringModeStringEncoder{binding.Encoder, cfg} + } else { + binding.Decoder = &stringModeNumberDecoder{binding.Decoder} + binding.Encoder = &stringModeNumberEncoder{binding.Encoder} + } + } + } + binding.Decoder = &structFieldDecoder{binding.Field, binding.Decoder} + binding.Encoder = &structFieldEncoder{binding.Field, binding.Encoder, shouldOmitEmpty} + } +} + +func calcFieldNames(originalFieldName string, tagProvidedFieldName string, wholeTag string) []string { + // ignore? + if wholeTag == "-" { + return []string{} + } + // rename? + var fieldNames []string + if tagProvidedFieldName == "" { + fieldNames = []string{originalFieldName} + } else { + fieldNames = []string{tagProvidedFieldName} + } + // private? 
+ isNotExported := unicode.IsLower(rune(originalFieldName[0])) + if isNotExported { + fieldNames = []string{} + } + return fieldNames +} diff --git a/vendor/github.com/json-iterator/go/reflect_json_number.go b/vendor/github.com/json-iterator/go/reflect_json_number.go new file mode 100644 index 0000000000..98d45c1ec2 --- /dev/null +++ b/vendor/github.com/json-iterator/go/reflect_json_number.go @@ -0,0 +1,112 @@ +package jsoniter + +import ( + "encoding/json" + "github.com/modern-go/reflect2" + "strconv" + "unsafe" +) + +type Number string + +// String returns the literal text of the number. +func (n Number) String() string { return string(n) } + +// Float64 returns the number as a float64. +func (n Number) Float64() (float64, error) { + return strconv.ParseFloat(string(n), 64) +} + +// Int64 returns the number as an int64. +func (n Number) Int64() (int64, error) { + return strconv.ParseInt(string(n), 10, 64) +} + +func CastJsonNumber(val interface{}) (string, bool) { + switch typedVal := val.(type) { + case json.Number: + return string(typedVal), true + case Number: + return string(typedVal), true + } + return "", false +} + +var jsonNumberType = reflect2.TypeOfPtr((*json.Number)(nil)).Elem() +var jsoniterNumberType = reflect2.TypeOfPtr((*Number)(nil)).Elem() + +func createDecoderOfJsonNumber(ctx *ctx, typ reflect2.Type) ValDecoder { + if typ.AssignableTo(jsonNumberType) { + return &jsonNumberCodec{} + } + if typ.AssignableTo(jsoniterNumberType) { + return &jsoniterNumberCodec{} + } + return nil +} + +func createEncoderOfJsonNumber(ctx *ctx, typ reflect2.Type) ValEncoder { + if typ.AssignableTo(jsonNumberType) { + return &jsonNumberCodec{} + } + if typ.AssignableTo(jsoniterNumberType) { + return &jsoniterNumberCodec{} + } + return nil +} + +type jsonNumberCodec struct { +} + +func (codec *jsonNumberCodec) Decode(ptr unsafe.Pointer, iter *Iterator) { + switch iter.WhatIsNext() { + case StringValue: + *((*json.Number)(ptr)) = json.Number(iter.ReadString()) + case NilValue: + iter.skipFourBytes('n', 'u', 'l', 'l') + *((*json.Number)(ptr)) = "" + default: + *((*json.Number)(ptr)) = json.Number([]byte(iter.readNumberAsString())) + } +} + +func (codec *jsonNumberCodec) Encode(ptr unsafe.Pointer, stream *Stream) { + number := *((*json.Number)(ptr)) + if len(number) == 0 { + stream.writeByte('0') + } else { + stream.WriteRaw(string(number)) + } +} + +func (codec *jsonNumberCodec) IsEmpty(ptr unsafe.Pointer) bool { + return len(*((*json.Number)(ptr))) == 0 +} + +type jsoniterNumberCodec struct { +} + +func (codec *jsoniterNumberCodec) Decode(ptr unsafe.Pointer, iter *Iterator) { + switch iter.WhatIsNext() { + case StringValue: + *((*Number)(ptr)) = Number(iter.ReadString()) + case NilValue: + iter.skipFourBytes('n', 'u', 'l', 'l') + *((*Number)(ptr)) = "" + default: + *((*Number)(ptr)) = Number([]byte(iter.readNumberAsString())) + } +} + +func (codec *jsoniterNumberCodec) Encode(ptr unsafe.Pointer, stream *Stream) { + number := *((*Number)(ptr)) + if len(number) == 0 { + stream.writeByte('0') + } else { + stream.WriteRaw(string(number)) + } +} + +func (codec *jsoniterNumberCodec) IsEmpty(ptr unsafe.Pointer) bool { + return len(*((*Number)(ptr))) == 0 +} diff --git a/vendor/github.com/json-iterator/go/reflect_json_raw_message.go b/vendor/github.com/json-iterator/go/reflect_json_raw_message.go new file mode 100644 index 0000000000..f2619936c8 --- /dev/null +++ b/vendor/github.com/json-iterator/go/reflect_json_raw_message.go @@ -0,0 +1,60 @@ +package jsoniter + +import ( + "encoding/json" + 
"github.com/modern-go/reflect2" + "unsafe" +) + +var jsonRawMessageType = reflect2.TypeOfPtr((*json.RawMessage)(nil)).Elem() +var jsoniterRawMessageType = reflect2.TypeOfPtr((*RawMessage)(nil)).Elem() + +func createEncoderOfJsonRawMessage(ctx *ctx, typ reflect2.Type) ValEncoder { + if typ == jsonRawMessageType { + return &jsonRawMessageCodec{} + } + if typ == jsoniterRawMessageType { + return &jsoniterRawMessageCodec{} + } + return nil +} + +func createDecoderOfJsonRawMessage(ctx *ctx, typ reflect2.Type) ValDecoder { + if typ == jsonRawMessageType { + return &jsonRawMessageCodec{} + } + if typ == jsoniterRawMessageType { + return &jsoniterRawMessageCodec{} + } + return nil +} + +type jsonRawMessageCodec struct { +} + +func (codec *jsonRawMessageCodec) Decode(ptr unsafe.Pointer, iter *Iterator) { + *((*json.RawMessage)(ptr)) = json.RawMessage(iter.SkipAndReturnBytes()) +} + +func (codec *jsonRawMessageCodec) Encode(ptr unsafe.Pointer, stream *Stream) { + stream.WriteRaw(string(*((*json.RawMessage)(ptr)))) +} + +func (codec *jsonRawMessageCodec) IsEmpty(ptr unsafe.Pointer) bool { + return len(*((*json.RawMessage)(ptr))) == 0 +} + +type jsoniterRawMessageCodec struct { +} + +func (codec *jsoniterRawMessageCodec) Decode(ptr unsafe.Pointer, iter *Iterator) { + *((*RawMessage)(ptr)) = RawMessage(iter.SkipAndReturnBytes()) +} + +func (codec *jsoniterRawMessageCodec) Encode(ptr unsafe.Pointer, stream *Stream) { + stream.WriteRaw(string(*((*RawMessage)(ptr)))) +} + +func (codec *jsoniterRawMessageCodec) IsEmpty(ptr unsafe.Pointer) bool { + return len(*((*RawMessage)(ptr))) == 0 +} diff --git a/vendor/github.com/json-iterator/go/reflect_map.go b/vendor/github.com/json-iterator/go/reflect_map.go new file mode 100644 index 0000000000..8812f08501 --- /dev/null +++ b/vendor/github.com/json-iterator/go/reflect_map.go @@ -0,0 +1,318 @@ +package jsoniter + +import ( + "fmt" + "github.com/modern-go/reflect2" + "io" + "reflect" + "sort" + "unsafe" +) + +func decoderOfMap(ctx *ctx, typ reflect2.Type) ValDecoder { + mapType := typ.(*reflect2.UnsafeMapType) + keyDecoder := decoderOfMapKey(ctx.append("[mapKey]"), mapType.Key()) + elemDecoder := decoderOfType(ctx.append("[mapElem]"), mapType.Elem()) + return &mapDecoder{ + mapType: mapType, + keyType: mapType.Key(), + elemType: mapType.Elem(), + keyDecoder: keyDecoder, + elemDecoder: elemDecoder, + } +} + +func encoderOfMap(ctx *ctx, typ reflect2.Type) ValEncoder { + mapType := typ.(*reflect2.UnsafeMapType) + if ctx.sortMapKeys { + return &sortKeysMapEncoder{ + mapType: mapType, + keyEncoder: encoderOfMapKey(ctx.append("[mapKey]"), mapType.Key()), + elemEncoder: encoderOfType(ctx.append("[mapElem]"), mapType.Elem()), + } + } + return &mapEncoder{ + mapType: mapType, + keyEncoder: encoderOfMapKey(ctx.append("[mapKey]"), mapType.Key()), + elemEncoder: encoderOfType(ctx.append("[mapElem]"), mapType.Elem()), + } +} + +func decoderOfMapKey(ctx *ctx, typ reflect2.Type) ValDecoder { + for _, extension := range ctx.extensions { + decoder := extension.CreateMapKeyDecoder(typ) + if decoder != nil { + return decoder + } + } + switch typ.Kind() { + case reflect.String: + return decoderOfType(ctx, reflect2.DefaultTypeOfKind(reflect.String)) + case reflect.Bool, + reflect.Uint8, reflect.Int8, + reflect.Uint16, reflect.Int16, + reflect.Uint32, reflect.Int32, + reflect.Uint64, reflect.Int64, + reflect.Uint, reflect.Int, + reflect.Float32, reflect.Float64, + reflect.Uintptr: + typ = reflect2.DefaultTypeOfKind(typ.Kind()) + return &numericMapKeyDecoder{decoderOfType(ctx, typ)} + 
default: + ptrType := reflect2.PtrTo(typ) + if ptrType.Implements(textMarshalerType) { + return &referenceDecoder{ + &textUnmarshalerDecoder{ + valType: ptrType, + }, + } + } + if typ.Implements(textMarshalerType) { + return &textUnmarshalerDecoder{ + valType: typ, + } + } + return &lazyErrorDecoder{err: fmt.Errorf("unsupported map key type: %v", typ)} + } +} + +func encoderOfMapKey(ctx *ctx, typ reflect2.Type) ValEncoder { + for _, extension := range ctx.extensions { + encoder := extension.CreateMapKeyEncoder(typ) + if encoder != nil { + return encoder + } + } + switch typ.Kind() { + case reflect.String: + return encoderOfType(ctx, reflect2.DefaultTypeOfKind(reflect.String)) + case reflect.Bool, + reflect.Uint8, reflect.Int8, + reflect.Uint16, reflect.Int16, + reflect.Uint32, reflect.Int32, + reflect.Uint64, reflect.Int64, + reflect.Uint, reflect.Int, + reflect.Float32, reflect.Float64, + reflect.Uintptr: + typ = reflect2.DefaultTypeOfKind(typ.Kind()) + return &numericMapKeyEncoder{encoderOfType(ctx, typ)} + default: + if typ == textMarshalerType { + return &directTextMarshalerEncoder{ + stringEncoder: ctx.EncoderOf(reflect2.TypeOf("")), + } + } + if typ.Implements(textMarshalerType) { + return &textMarshalerEncoder{ + valType: typ, + stringEncoder: ctx.EncoderOf(reflect2.TypeOf("")), + } + } + if typ.Kind() == reflect.Interface { + return &dynamicMapKeyEncoder{ctx, typ} + } + return &lazyErrorEncoder{err: fmt.Errorf("unsupported map key type: %v", typ)} + } +} + +type mapDecoder struct { + mapType *reflect2.UnsafeMapType + keyType reflect2.Type + elemType reflect2.Type + keyDecoder ValDecoder + elemDecoder ValDecoder +} + +func (decoder *mapDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) { + mapType := decoder.mapType + c := iter.nextToken() + if c == 'n' { + iter.skipThreeBytes('u', 'l', 'l') + *(*unsafe.Pointer)(ptr) = nil + mapType.UnsafeSet(ptr, mapType.UnsafeNew()) + return + } + if mapType.UnsafeIsNil(ptr) { + mapType.UnsafeSet(ptr, mapType.UnsafeMakeMap(0)) + } + if c != '{' { + iter.ReportError("ReadMapCB", `expect { or n, but found `+string([]byte{c})) + return + } + c = iter.nextToken() + if c == '}' { + return + } + if c != '"' { + iter.ReportError("ReadMapCB", `expect " after }, but found `+string([]byte{c})) + return + } + iter.unreadByte() + key := decoder.keyType.UnsafeNew() + decoder.keyDecoder.Decode(key, iter) + c = iter.nextToken() + if c != ':' { + iter.ReportError("ReadMapCB", "expect : after object field, but found "+string([]byte{c})) + return + } + elem := decoder.elemType.UnsafeNew() + decoder.elemDecoder.Decode(elem, iter) + decoder.mapType.UnsafeSetIndex(ptr, key, elem) + for c = iter.nextToken(); c == ','; c = iter.nextToken() { + key := decoder.keyType.UnsafeNew() + decoder.keyDecoder.Decode(key, iter) + c = iter.nextToken() + if c != ':' { + iter.ReportError("ReadMapCB", "expect : after object field, but found "+string([]byte{c})) + return + } + elem := decoder.elemType.UnsafeNew() + decoder.elemDecoder.Decode(elem, iter) + decoder.mapType.UnsafeSetIndex(ptr, key, elem) + } + if c != '}' { + iter.ReportError("ReadMapCB", `expect }, but found `+string([]byte{c})) + } +} + +type numericMapKeyDecoder struct { + decoder ValDecoder +} + +func (decoder *numericMapKeyDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) { + c := iter.nextToken() + if c != '"' { + iter.ReportError("ReadMapCB", `expect ", but found `+string([]byte{c})) + return + } + decoder.decoder.Decode(ptr, iter) + c = iter.nextToken() + if c != '"' { + iter.ReportError("ReadMapCB", `expect ", 
but found `+string([]byte{c})) + return + } +} + +type numericMapKeyEncoder struct { + encoder ValEncoder +} + +func (encoder *numericMapKeyEncoder) Encode(ptr unsafe.Pointer, stream *Stream) { + stream.writeByte('"') + encoder.encoder.Encode(ptr, stream) + stream.writeByte('"') +} + +func (encoder *numericMapKeyEncoder) IsEmpty(ptr unsafe.Pointer) bool { + return false +} + +type dynamicMapKeyEncoder struct { + ctx *ctx + valType reflect2.Type +} + +func (encoder *dynamicMapKeyEncoder) Encode(ptr unsafe.Pointer, stream *Stream) { + obj := encoder.valType.UnsafeIndirect(ptr) + encoderOfMapKey(encoder.ctx, reflect2.TypeOf(obj)).Encode(reflect2.PtrOf(obj), stream) +} + +func (encoder *dynamicMapKeyEncoder) IsEmpty(ptr unsafe.Pointer) bool { + obj := encoder.valType.UnsafeIndirect(ptr) + return encoderOfMapKey(encoder.ctx, reflect2.TypeOf(obj)).IsEmpty(reflect2.PtrOf(obj)) +} + +type mapEncoder struct { + mapType *reflect2.UnsafeMapType + keyEncoder ValEncoder + elemEncoder ValEncoder +} + +func (encoder *mapEncoder) Encode(ptr unsafe.Pointer, stream *Stream) { + stream.WriteObjectStart() + iter := encoder.mapType.UnsafeIterate(ptr) + for i := 0; iter.HasNext(); i++ { + if i != 0 { + stream.WriteMore() + } + key, elem := iter.UnsafeNext() + encoder.keyEncoder.Encode(key, stream) + if stream.indention > 0 { + stream.writeTwoBytes(byte(':'), byte(' ')) + } else { + stream.writeByte(':') + } + encoder.elemEncoder.Encode(elem, stream) + } + stream.WriteObjectEnd() +} + +func (encoder *mapEncoder) IsEmpty(ptr unsafe.Pointer) bool { + iter := encoder.mapType.UnsafeIterate(ptr) + return !iter.HasNext() +} + +type sortKeysMapEncoder struct { + mapType *reflect2.UnsafeMapType + keyEncoder ValEncoder + elemEncoder ValEncoder +} + +func (encoder *sortKeysMapEncoder) Encode(ptr unsafe.Pointer, stream *Stream) { + if *(*unsafe.Pointer)(ptr) == nil { + stream.WriteNil() + return + } + stream.WriteObjectStart() + mapIter := encoder.mapType.UnsafeIterate(ptr) + subStream := stream.cfg.BorrowStream(nil) + subIter := stream.cfg.BorrowIterator(nil) + keyValues := encodedKeyValues{} + for mapIter.HasNext() { + subStream.buf = make([]byte, 0, 64) + key, elem := mapIter.UnsafeNext() + encoder.keyEncoder.Encode(key, subStream) + if subStream.Error != nil && subStream.Error != io.EOF && stream.Error == nil { + stream.Error = subStream.Error + } + encodedKey := subStream.Buffer() + subIter.ResetBytes(encodedKey) + decodedKey := subIter.ReadString() + if stream.indention > 0 { + subStream.writeTwoBytes(byte(':'), byte(' ')) + } else { + subStream.writeByte(':') + } + encoder.elemEncoder.Encode(elem, subStream) + keyValues = append(keyValues, encodedKV{ + key: decodedKey, + keyValue: subStream.Buffer(), + }) + } + sort.Sort(keyValues) + for i, keyValue := range keyValues { + if i != 0 { + stream.WriteMore() + } + stream.Write(keyValue.keyValue) + } + stream.WriteObjectEnd() + stream.cfg.ReturnStream(subStream) + stream.cfg.ReturnIterator(subIter) +} + +func (encoder *sortKeysMapEncoder) IsEmpty(ptr unsafe.Pointer) bool { + iter := encoder.mapType.UnsafeIterate(ptr) + return !iter.HasNext() +} + +type encodedKeyValues []encodedKV + +type encodedKV struct { + key string + keyValue []byte +} + +func (sv encodedKeyValues) Len() int { return len(sv) } +func (sv encodedKeyValues) Swap(i, j int) { sv[i], sv[j] = sv[j], sv[i] } +func (sv encodedKeyValues) Less(i, j int) bool { return sv[i].key < sv[j].key } diff --git a/vendor/github.com/json-iterator/go/reflect_marshaler.go 
b/vendor/github.com/json-iterator/go/reflect_marshaler.go new file mode 100644 index 0000000000..58ac959ad8 --- /dev/null +++ b/vendor/github.com/json-iterator/go/reflect_marshaler.go @@ -0,0 +1,218 @@ +package jsoniter + +import ( + "encoding" + "encoding/json" + "github.com/modern-go/reflect2" + "unsafe" +) + +var marshalerType = reflect2.TypeOfPtr((*json.Marshaler)(nil)).Elem() +var unmarshalerType = reflect2.TypeOfPtr((*json.Unmarshaler)(nil)).Elem() +var textMarshalerType = reflect2.TypeOfPtr((*encoding.TextMarshaler)(nil)).Elem() +var textUnmarshalerType = reflect2.TypeOfPtr((*encoding.TextUnmarshaler)(nil)).Elem() + +func createDecoderOfMarshaler(ctx *ctx, typ reflect2.Type) ValDecoder { + ptrType := reflect2.PtrTo(typ) + if ptrType.Implements(unmarshalerType) { + return &referenceDecoder{ + &unmarshalerDecoder{ptrType}, + } + } + if ptrType.Implements(textUnmarshalerType) { + return &referenceDecoder{ + &textUnmarshalerDecoder{ptrType}, + } + } + return nil +} + +func createEncoderOfMarshaler(ctx *ctx, typ reflect2.Type) ValEncoder { + if typ == marshalerType { + checkIsEmpty := createCheckIsEmpty(ctx, typ) + var encoder ValEncoder = &directMarshalerEncoder{ + checkIsEmpty: checkIsEmpty, + } + return encoder + } + if typ.Implements(marshalerType) { + checkIsEmpty := createCheckIsEmpty(ctx, typ) + var encoder ValEncoder = &marshalerEncoder{ + valType: typ, + checkIsEmpty: checkIsEmpty, + } + return encoder + } + ptrType := reflect2.PtrTo(typ) + if ctx.prefix != "" && ptrType.Implements(marshalerType) { + checkIsEmpty := createCheckIsEmpty(ctx, ptrType) + var encoder ValEncoder = &marshalerEncoder{ + valType: ptrType, + checkIsEmpty: checkIsEmpty, + } + return &referenceEncoder{encoder} + } + if typ == textMarshalerType { + checkIsEmpty := createCheckIsEmpty(ctx, typ) + var encoder ValEncoder = &directTextMarshalerEncoder{ + checkIsEmpty: checkIsEmpty, + stringEncoder: ctx.EncoderOf(reflect2.TypeOf("")), + } + return encoder + } + if typ.Implements(textMarshalerType) { + checkIsEmpty := createCheckIsEmpty(ctx, typ) + var encoder ValEncoder = &textMarshalerEncoder{ + valType: typ, + stringEncoder: ctx.EncoderOf(reflect2.TypeOf("")), + checkIsEmpty: checkIsEmpty, + } + return encoder + } + // if prefix is empty, the type is the root type + if ctx.prefix != "" && ptrType.Implements(textMarshalerType) { + checkIsEmpty := createCheckIsEmpty(ctx, ptrType) + var encoder ValEncoder = &textMarshalerEncoder{ + valType: ptrType, + stringEncoder: ctx.EncoderOf(reflect2.TypeOf("")), + checkIsEmpty: checkIsEmpty, + } + return &referenceEncoder{encoder} + } + return nil +} + +type marshalerEncoder struct { + checkIsEmpty checkIsEmpty + valType reflect2.Type +} + +func (encoder *marshalerEncoder) Encode(ptr unsafe.Pointer, stream *Stream) { + obj := encoder.valType.UnsafeIndirect(ptr) + if encoder.valType.IsNullable() && reflect2.IsNil(obj) { + stream.WriteNil() + return + } + marshaler := obj.(json.Marshaler) + bytes, err := marshaler.MarshalJSON() + if err != nil { + stream.Error = err + } else { + stream.Write(bytes) + } +} + +func (encoder *marshalerEncoder) IsEmpty(ptr unsafe.Pointer) bool { + return encoder.checkIsEmpty.IsEmpty(ptr) +} + +type directMarshalerEncoder struct { + checkIsEmpty checkIsEmpty +} + +func (encoder *directMarshalerEncoder) Encode(ptr unsafe.Pointer, stream *Stream) { + marshaler := *(*json.Marshaler)(ptr) + if marshaler == nil { + stream.WriteNil() + return + } + bytes, err := marshaler.MarshalJSON() + if err != nil { + stream.Error = err + } else { + 
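// MarshalJSON succeeded: the returned bytes are already a complete JSON value, so write them to the stream verbatim.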
stream.Write(bytes) + } +} + +func (encoder *directMarshalerEncoder) IsEmpty(ptr unsafe.Pointer) bool { + return encoder.checkIsEmpty.IsEmpty(ptr) +} + +type textMarshalerEncoder struct { + valType reflect2.Type + stringEncoder ValEncoder + checkIsEmpty checkIsEmpty +} + +func (encoder *textMarshalerEncoder) Encode(ptr unsafe.Pointer, stream *Stream) { + obj := encoder.valType.UnsafeIndirect(ptr) + if encoder.valType.IsNullable() && reflect2.IsNil(obj) { + stream.WriteNil() + return + } + marshaler := (obj).(encoding.TextMarshaler) + bytes, err := marshaler.MarshalText() + if err != nil { + stream.Error = err + } else { + str := string(bytes) + encoder.stringEncoder.Encode(unsafe.Pointer(&str), stream) + } +} + +func (encoder *textMarshalerEncoder) IsEmpty(ptr unsafe.Pointer) bool { + return encoder.checkIsEmpty.IsEmpty(ptr) +} + +type directTextMarshalerEncoder struct { + stringEncoder ValEncoder + checkIsEmpty checkIsEmpty +} + +func (encoder *directTextMarshalerEncoder) Encode(ptr unsafe.Pointer, stream *Stream) { + marshaler := *(*encoding.TextMarshaler)(ptr) + if marshaler == nil { + stream.WriteNil() + return + } + bytes, err := marshaler.MarshalText() + if err != nil { + stream.Error = err + } else { + str := string(bytes) + encoder.stringEncoder.Encode(unsafe.Pointer(&str), stream) + } +} + +func (encoder *directTextMarshalerEncoder) IsEmpty(ptr unsafe.Pointer) bool { + return encoder.checkIsEmpty.IsEmpty(ptr) +} + +type unmarshalerDecoder struct { + valType reflect2.Type +} + +func (decoder *unmarshalerDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) { + valType := decoder.valType + obj := valType.UnsafeIndirect(ptr) + unmarshaler := obj.(json.Unmarshaler) + iter.nextToken() + iter.unreadByte() // skip spaces + bytes := iter.SkipAndReturnBytes() + err := unmarshaler.UnmarshalJSON(bytes) + if err != nil { + iter.ReportError("unmarshalerDecoder", err.Error()) + } +} + +type textUnmarshalerDecoder struct { + valType reflect2.Type +} + +func (decoder *textUnmarshalerDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) { + valType := decoder.valType + obj := valType.UnsafeIndirect(ptr) + if reflect2.IsNil(obj) { + ptrType := valType.(*reflect2.UnsafePtrType) + elemType := ptrType.Elem() + elem := elemType.UnsafeNew() + ptrType.UnsafeSet(ptr, unsafe.Pointer(&elem)) + obj = valType.UnsafeIndirect(ptr) + } + unmarshaler := (obj).(encoding.TextUnmarshaler) + str := iter.ReadString() + err := unmarshaler.UnmarshalText([]byte(str)) + if err != nil { + iter.ReportError("textUnmarshalerDecoder", err.Error()) + } +} diff --git a/vendor/github.com/json-iterator/go/reflect_native.go b/vendor/github.com/json-iterator/go/reflect_native.go new file mode 100644 index 0000000000..9042eb0cb9 --- /dev/null +++ b/vendor/github.com/json-iterator/go/reflect_native.go @@ -0,0 +1,451 @@ +package jsoniter + +import ( + "encoding/base64" + "reflect" + "strconv" + "unsafe" + + "github.com/modern-go/reflect2" +) + +const ptrSize = 32 << uintptr(^uintptr(0)>>63) + +func createEncoderOfNative(ctx *ctx, typ reflect2.Type) ValEncoder { + if typ.Kind() == reflect.Slice && typ.(reflect2.SliceType).Elem().Kind() == reflect.Uint8 { + sliceDecoder := decoderOfSlice(ctx, typ) + return &base64Codec{sliceDecoder: sliceDecoder} + } + typeName := typ.String() + kind := typ.Kind() + switch kind { + case reflect.String: + if typeName != "string" { + return encoderOfType(ctx, reflect2.TypeOfPtr((*string)(nil)).Elem()) + } + return &stringCodec{} + case reflect.Int: + if typeName != "int" { + return encoderOfType(ctx, 
reflect2.TypeOfPtr((*int)(nil)).Elem()) + } + if strconv.IntSize == 32 { + return &int32Codec{} + } + return &int64Codec{} + case reflect.Int8: + if typeName != "int8" { + return encoderOfType(ctx, reflect2.TypeOfPtr((*int8)(nil)).Elem()) + } + return &int8Codec{} + case reflect.Int16: + if typeName != "int16" { + return encoderOfType(ctx, reflect2.TypeOfPtr((*int16)(nil)).Elem()) + } + return &int16Codec{} + case reflect.Int32: + if typeName != "int32" { + return encoderOfType(ctx, reflect2.TypeOfPtr((*int32)(nil)).Elem()) + } + return &int32Codec{} + case reflect.Int64: + if typeName != "int64" { + return encoderOfType(ctx, reflect2.TypeOfPtr((*int64)(nil)).Elem()) + } + return &int64Codec{} + case reflect.Uint: + if typeName != "uint" { + return encoderOfType(ctx, reflect2.TypeOfPtr((*uint)(nil)).Elem()) + } + if strconv.IntSize == 32 { + return &uint32Codec{} + } + return &uint64Codec{} + case reflect.Uint8: + if typeName != "uint8" { + return encoderOfType(ctx, reflect2.TypeOfPtr((*uint8)(nil)).Elem()) + } + return &uint8Codec{} + case reflect.Uint16: + if typeName != "uint16" { + return encoderOfType(ctx, reflect2.TypeOfPtr((*uint16)(nil)).Elem()) + } + return &uint16Codec{} + case reflect.Uint32: + if typeName != "uint32" { + return encoderOfType(ctx, reflect2.TypeOfPtr((*uint32)(nil)).Elem()) + } + return &uint32Codec{} + case reflect.Uintptr: + if typeName != "uintptr" { + return encoderOfType(ctx, reflect2.TypeOfPtr((*uintptr)(nil)).Elem()) + } + if ptrSize == 32 { + return &uint32Codec{} + } + return &uint64Codec{} + case reflect.Uint64: + if typeName != "uint64" { + return encoderOfType(ctx, reflect2.TypeOfPtr((*uint64)(nil)).Elem()) + } + return &uint64Codec{} + case reflect.Float32: + if typeName != "float32" { + return encoderOfType(ctx, reflect2.TypeOfPtr((*float32)(nil)).Elem()) + } + return &float32Codec{} + case reflect.Float64: + if typeName != "float64" { + return encoderOfType(ctx, reflect2.TypeOfPtr((*float64)(nil)).Elem()) + } + return &float64Codec{} + case reflect.Bool: + if typeName != "bool" { + return encoderOfType(ctx, reflect2.TypeOfPtr((*bool)(nil)).Elem()) + } + return &boolCodec{} + } + return nil +} + +func createDecoderOfNative(ctx *ctx, typ reflect2.Type) ValDecoder { + if typ.Kind() == reflect.Slice && typ.(reflect2.SliceType).Elem().Kind() == reflect.Uint8 { + sliceDecoder := decoderOfSlice(ctx, typ) + return &base64Codec{sliceDecoder: sliceDecoder} + } + typeName := typ.String() + switch typ.Kind() { + case reflect.String: + if typeName != "string" { + return decoderOfType(ctx, reflect2.TypeOfPtr((*string)(nil)).Elem()) + } + return &stringCodec{} + case reflect.Int: + if typeName != "int" { + return decoderOfType(ctx, reflect2.TypeOfPtr((*int)(nil)).Elem()) + } + if strconv.IntSize == 32 { + return &int32Codec{} + } + return &int64Codec{} + case reflect.Int8: + if typeName != "int8" { + return decoderOfType(ctx, reflect2.TypeOfPtr((*int8)(nil)).Elem()) + } + return &int8Codec{} + case reflect.Int16: + if typeName != "int16" { + return decoderOfType(ctx, reflect2.TypeOfPtr((*int16)(nil)).Elem()) + } + return &int16Codec{} + case reflect.Int32: + if typeName != "int32" { + return decoderOfType(ctx, reflect2.TypeOfPtr((*int32)(nil)).Elem()) + } + return &int32Codec{} + case reflect.Int64: + if typeName != "int64" { + return decoderOfType(ctx, reflect2.TypeOfPtr((*int64)(nil)).Elem()) + } + return &int64Codec{} + case reflect.Uint: + if typeName != "uint" { + return decoderOfType(ctx, reflect2.TypeOfPtr((*uint)(nil)).Elem()) + } + if strconv.IntSize == 
32 { + return &uint32Codec{} + } + return &uint64Codec{} + case reflect.Uint8: + if typeName != "uint8" { + return decoderOfType(ctx, reflect2.TypeOfPtr((*uint8)(nil)).Elem()) + } + return &uint8Codec{} + case reflect.Uint16: + if typeName != "uint16" { + return decoderOfType(ctx, reflect2.TypeOfPtr((*uint16)(nil)).Elem()) + } + return &uint16Codec{} + case reflect.Uint32: + if typeName != "uint32" { + return decoderOfType(ctx, reflect2.TypeOfPtr((*uint32)(nil)).Elem()) + } + return &uint32Codec{} + case reflect.Uintptr: + if typeName != "uintptr" { + return decoderOfType(ctx, reflect2.TypeOfPtr((*uintptr)(nil)).Elem()) + } + if ptrSize == 32 { + return &uint32Codec{} + } + return &uint64Codec{} + case reflect.Uint64: + if typeName != "uint64" { + return decoderOfType(ctx, reflect2.TypeOfPtr((*uint64)(nil)).Elem()) + } + return &uint64Codec{} + case reflect.Float32: + if typeName != "float32" { + return decoderOfType(ctx, reflect2.TypeOfPtr((*float32)(nil)).Elem()) + } + return &float32Codec{} + case reflect.Float64: + if typeName != "float64" { + return decoderOfType(ctx, reflect2.TypeOfPtr((*float64)(nil)).Elem()) + } + return &float64Codec{} + case reflect.Bool: + if typeName != "bool" { + return decoderOfType(ctx, reflect2.TypeOfPtr((*bool)(nil)).Elem()) + } + return &boolCodec{} + } + return nil +} + +type stringCodec struct { +} + +func (codec *stringCodec) Decode(ptr unsafe.Pointer, iter *Iterator) { + *((*string)(ptr)) = iter.ReadString() +} + +func (codec *stringCodec) Encode(ptr unsafe.Pointer, stream *Stream) { + str := *((*string)(ptr)) + stream.WriteString(str) +} + +func (codec *stringCodec) IsEmpty(ptr unsafe.Pointer) bool { + return *((*string)(ptr)) == "" +} + +type int8Codec struct { +} + +func (codec *int8Codec) Decode(ptr unsafe.Pointer, iter *Iterator) { + if !iter.ReadNil() { + *((*int8)(ptr)) = iter.ReadInt8() + } +} + +func (codec *int8Codec) Encode(ptr unsafe.Pointer, stream *Stream) { + stream.WriteInt8(*((*int8)(ptr))) +} + +func (codec *int8Codec) IsEmpty(ptr unsafe.Pointer) bool { + return *((*int8)(ptr)) == 0 +} + +type int16Codec struct { +} + +func (codec *int16Codec) Decode(ptr unsafe.Pointer, iter *Iterator) { + if !iter.ReadNil() { + *((*int16)(ptr)) = iter.ReadInt16() + } +} + +func (codec *int16Codec) Encode(ptr unsafe.Pointer, stream *Stream) { + stream.WriteInt16(*((*int16)(ptr))) +} + +func (codec *int16Codec) IsEmpty(ptr unsafe.Pointer) bool { + return *((*int16)(ptr)) == 0 +} + +type int32Codec struct { +} + +func (codec *int32Codec) Decode(ptr unsafe.Pointer, iter *Iterator) { + if !iter.ReadNil() { + *((*int32)(ptr)) = iter.ReadInt32() + } +} + +func (codec *int32Codec) Encode(ptr unsafe.Pointer, stream *Stream) { + stream.WriteInt32(*((*int32)(ptr))) +} + +func (codec *int32Codec) IsEmpty(ptr unsafe.Pointer) bool { + return *((*int32)(ptr)) == 0 +} + +type int64Codec struct { +} + +func (codec *int64Codec) Decode(ptr unsafe.Pointer, iter *Iterator) { + if !iter.ReadNil() { + *((*int64)(ptr)) = iter.ReadInt64() + } +} + +func (codec *int64Codec) Encode(ptr unsafe.Pointer, stream *Stream) { + stream.WriteInt64(*((*int64)(ptr))) +} + +func (codec *int64Codec) IsEmpty(ptr unsafe.Pointer) bool { + return *((*int64)(ptr)) == 0 +} + +type uint8Codec struct { +} + +func (codec *uint8Codec) Decode(ptr unsafe.Pointer, iter *Iterator) { + if !iter.ReadNil() { + *((*uint8)(ptr)) = iter.ReadUint8() + } +} + +func (codec *uint8Codec) Encode(ptr unsafe.Pointer, stream *Stream) { + stream.WriteUint8(*((*uint8)(ptr))) +} + +func (codec *uint8Codec) IsEmpty(ptr 
unsafe.Pointer) bool { + return *((*uint8)(ptr)) == 0 +} + +type uint16Codec struct { +} + +func (codec *uint16Codec) Decode(ptr unsafe.Pointer, iter *Iterator) { + if !iter.ReadNil() { + *((*uint16)(ptr)) = iter.ReadUint16() + } +} + +func (codec *uint16Codec) Encode(ptr unsafe.Pointer, stream *Stream) { + stream.WriteUint16(*((*uint16)(ptr))) +} + +func (codec *uint16Codec) IsEmpty(ptr unsafe.Pointer) bool { + return *((*uint16)(ptr)) == 0 +} + +type uint32Codec struct { +} + +func (codec *uint32Codec) Decode(ptr unsafe.Pointer, iter *Iterator) { + if !iter.ReadNil() { + *((*uint32)(ptr)) = iter.ReadUint32() + } +} + +func (codec *uint32Codec) Encode(ptr unsafe.Pointer, stream *Stream) { + stream.WriteUint32(*((*uint32)(ptr))) +} + +func (codec *uint32Codec) IsEmpty(ptr unsafe.Pointer) bool { + return *((*uint32)(ptr)) == 0 +} + +type uint64Codec struct { +} + +func (codec *uint64Codec) Decode(ptr unsafe.Pointer, iter *Iterator) { + if !iter.ReadNil() { + *((*uint64)(ptr)) = iter.ReadUint64() + } +} + +func (codec *uint64Codec) Encode(ptr unsafe.Pointer, stream *Stream) { + stream.WriteUint64(*((*uint64)(ptr))) +} + +func (codec *uint64Codec) IsEmpty(ptr unsafe.Pointer) bool { + return *((*uint64)(ptr)) == 0 +} + +type float32Codec struct { +} + +func (codec *float32Codec) Decode(ptr unsafe.Pointer, iter *Iterator) { + if !iter.ReadNil() { + *((*float32)(ptr)) = iter.ReadFloat32() + } +} + +func (codec *float32Codec) Encode(ptr unsafe.Pointer, stream *Stream) { + stream.WriteFloat32(*((*float32)(ptr))) +} + +func (codec *float32Codec) IsEmpty(ptr unsafe.Pointer) bool { + return *((*float32)(ptr)) == 0 +} + +type float64Codec struct { +} + +func (codec *float64Codec) Decode(ptr unsafe.Pointer, iter *Iterator) { + if !iter.ReadNil() { + *((*float64)(ptr)) = iter.ReadFloat64() + } +} + +func (codec *float64Codec) Encode(ptr unsafe.Pointer, stream *Stream) { + stream.WriteFloat64(*((*float64)(ptr))) +} + +func (codec *float64Codec) IsEmpty(ptr unsafe.Pointer) bool { + return *((*float64)(ptr)) == 0 +} + +type boolCodec struct { +} + +func (codec *boolCodec) Decode(ptr unsafe.Pointer, iter *Iterator) { + if !iter.ReadNil() { + *((*bool)(ptr)) = iter.ReadBool() + } +} + +func (codec *boolCodec) Encode(ptr unsafe.Pointer, stream *Stream) { + stream.WriteBool(*((*bool)(ptr))) +} + +func (codec *boolCodec) IsEmpty(ptr unsafe.Pointer) bool { + return !(*((*bool)(ptr))) +} + +type base64Codec struct { + sliceType *reflect2.UnsafeSliceType + sliceDecoder ValDecoder +} + +func (codec *base64Codec) Decode(ptr unsafe.Pointer, iter *Iterator) { + if iter.ReadNil() { + codec.sliceType.UnsafeSetNil(ptr) + return + } + switch iter.WhatIsNext() { + case StringValue: + src := iter.ReadString() + dst, err := base64.StdEncoding.DecodeString(src) + if err != nil { + iter.ReportError("decode base64", err.Error()) + } else { + codec.sliceType.UnsafeSet(ptr, unsafe.Pointer(&dst)) + } + case ArrayValue: + codec.sliceDecoder.Decode(ptr, iter) + default: + iter.ReportError("base64Codec", "invalid input") + } +} + +func (codec *base64Codec) Encode(ptr unsafe.Pointer, stream *Stream) { + src := *((*[]byte)(ptr)) + if len(src) == 0 { + stream.WriteNil() + return + } + encoding := base64.StdEncoding + stream.writeByte('"') + size := encoding.EncodedLen(len(src)) + buf := make([]byte, size) + encoding.Encode(buf, src) + stream.buf = append(stream.buf, buf...) 
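// The base64 text has been appended straight into the stream buffer; the closing quote below terminates the JSON string.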
+ stream.writeByte('"') +} + +func (codec *base64Codec) IsEmpty(ptr unsafe.Pointer) bool { + return len(*((*[]byte)(ptr))) == 0 +} diff --git a/vendor/github.com/json-iterator/go/reflect_optional.go b/vendor/github.com/json-iterator/go/reflect_optional.go new file mode 100644 index 0000000000..43ec71d6da --- /dev/null +++ b/vendor/github.com/json-iterator/go/reflect_optional.go @@ -0,0 +1,133 @@ +package jsoniter + +import ( + "github.com/modern-go/reflect2" + "reflect" + "unsafe" +) + +func decoderOfOptional(ctx *ctx, typ reflect2.Type) ValDecoder { + ptrType := typ.(*reflect2.UnsafePtrType) + elemType := ptrType.Elem() + decoder := decoderOfType(ctx, elemType) + if ctx.prefix == "" && elemType.Kind() == reflect.Ptr { + return &dereferenceDecoder{elemType, decoder} + } + return &OptionalDecoder{elemType, decoder} +} + +func encoderOfOptional(ctx *ctx, typ reflect2.Type) ValEncoder { + ptrType := typ.(*reflect2.UnsafePtrType) + elemType := ptrType.Elem() + elemEncoder := encoderOfType(ctx, elemType) + encoder := &OptionalEncoder{elemEncoder} + return encoder +} + +type OptionalDecoder struct { + ValueType reflect2.Type + ValueDecoder ValDecoder +} + +func (decoder *OptionalDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) { + if iter.ReadNil() { + *((*unsafe.Pointer)(ptr)) = nil + } else { + if *((*unsafe.Pointer)(ptr)) == nil { + //pointer to null, we have to allocate memory to hold the value + newPtr := decoder.ValueType.UnsafeNew() + decoder.ValueDecoder.Decode(newPtr, iter) + *((*unsafe.Pointer)(ptr)) = newPtr + } else { + //reuse existing instance + decoder.ValueDecoder.Decode(*((*unsafe.Pointer)(ptr)), iter) + } + } +} + +type dereferenceDecoder struct { + // only to deference a pointer + valueType reflect2.Type + valueDecoder ValDecoder +} + +func (decoder *dereferenceDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) { + if *((*unsafe.Pointer)(ptr)) == nil { + //pointer to null, we have to allocate memory to hold the value + newPtr := decoder.valueType.UnsafeNew() + decoder.valueDecoder.Decode(newPtr, iter) + *((*unsafe.Pointer)(ptr)) = newPtr + } else { + //reuse existing instance + decoder.valueDecoder.Decode(*((*unsafe.Pointer)(ptr)), iter) + } +} + +type OptionalEncoder struct { + ValueEncoder ValEncoder +} + +func (encoder *OptionalEncoder) Encode(ptr unsafe.Pointer, stream *Stream) { + if *((*unsafe.Pointer)(ptr)) == nil { + stream.WriteNil() + } else { + encoder.ValueEncoder.Encode(*((*unsafe.Pointer)(ptr)), stream) + } +} + +func (encoder *OptionalEncoder) IsEmpty(ptr unsafe.Pointer) bool { + return *((*unsafe.Pointer)(ptr)) == nil +} + +type dereferenceEncoder struct { + ValueEncoder ValEncoder +} + +func (encoder *dereferenceEncoder) Encode(ptr unsafe.Pointer, stream *Stream) { + if *((*unsafe.Pointer)(ptr)) == nil { + stream.WriteNil() + } else { + encoder.ValueEncoder.Encode(*((*unsafe.Pointer)(ptr)), stream) + } +} + +func (encoder *dereferenceEncoder) IsEmpty(ptr unsafe.Pointer) bool { + dePtr := *((*unsafe.Pointer)(ptr)) + if dePtr == nil { + return true + } + return encoder.ValueEncoder.IsEmpty(dePtr) +} + +func (encoder *dereferenceEncoder) IsEmbeddedPtrNil(ptr unsafe.Pointer) bool { + deReferenced := *((*unsafe.Pointer)(ptr)) + if deReferenced == nil { + return true + } + isEmbeddedPtrNil, converted := encoder.ValueEncoder.(IsEmbeddedPtrNil) + if !converted { + return false + } + fieldPtr := unsafe.Pointer(deReferenced) + return isEmbeddedPtrNil.IsEmbeddedPtrNil(fieldPtr) +} + +type referenceEncoder struct { + encoder ValEncoder +} + +func (encoder 
*referenceEncoder) Encode(ptr unsafe.Pointer, stream *Stream) { + encoder.encoder.Encode(unsafe.Pointer(&ptr), stream) +} + +func (encoder *referenceEncoder) IsEmpty(ptr unsafe.Pointer) bool { + return encoder.encoder.IsEmpty(unsafe.Pointer(&ptr)) +} + +type referenceDecoder struct { + decoder ValDecoder +} + +func (decoder *referenceDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) { + decoder.decoder.Decode(unsafe.Pointer(&ptr), iter) +} diff --git a/vendor/github.com/json-iterator/go/reflect_slice.go b/vendor/github.com/json-iterator/go/reflect_slice.go new file mode 100644 index 0000000000..9441d79df3 --- /dev/null +++ b/vendor/github.com/json-iterator/go/reflect_slice.go @@ -0,0 +1,99 @@ +package jsoniter + +import ( + "fmt" + "github.com/modern-go/reflect2" + "io" + "unsafe" +) + +func decoderOfSlice(ctx *ctx, typ reflect2.Type) ValDecoder { + sliceType := typ.(*reflect2.UnsafeSliceType) + decoder := decoderOfType(ctx.append("[sliceElem]"), sliceType.Elem()) + return &sliceDecoder{sliceType, decoder} +} + +func encoderOfSlice(ctx *ctx, typ reflect2.Type) ValEncoder { + sliceType := typ.(*reflect2.UnsafeSliceType) + encoder := encoderOfType(ctx.append("[sliceElem]"), sliceType.Elem()) + return &sliceEncoder{sliceType, encoder} +} + +type sliceEncoder struct { + sliceType *reflect2.UnsafeSliceType + elemEncoder ValEncoder +} + +func (encoder *sliceEncoder) Encode(ptr unsafe.Pointer, stream *Stream) { + if encoder.sliceType.UnsafeIsNil(ptr) { + stream.WriteNil() + return + } + length := encoder.sliceType.UnsafeLengthOf(ptr) + if length == 0 { + stream.WriteEmptyArray() + return + } + stream.WriteArrayStart() + encoder.elemEncoder.Encode(encoder.sliceType.UnsafeGetIndex(ptr, 0), stream) + for i := 1; i < length; i++ { + stream.WriteMore() + elemPtr := encoder.sliceType.UnsafeGetIndex(ptr, i) + encoder.elemEncoder.Encode(elemPtr, stream) + } + stream.WriteArrayEnd() + if stream.Error != nil && stream.Error != io.EOF { + stream.Error = fmt.Errorf("%v: %s", encoder.sliceType, stream.Error.Error()) + } +} + +func (encoder *sliceEncoder) IsEmpty(ptr unsafe.Pointer) bool { + return encoder.sliceType.UnsafeLengthOf(ptr) == 0 +} + +type sliceDecoder struct { + sliceType *reflect2.UnsafeSliceType + elemDecoder ValDecoder +} + +func (decoder *sliceDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) { + decoder.doDecode(ptr, iter) + if iter.Error != nil && iter.Error != io.EOF { + iter.Error = fmt.Errorf("%v: %s", decoder.sliceType, iter.Error.Error()) + } +} + +func (decoder *sliceDecoder) doDecode(ptr unsafe.Pointer, iter *Iterator) { + c := iter.nextToken() + sliceType := decoder.sliceType + if c == 'n' { + iter.skipThreeBytes('u', 'l', 'l') + sliceType.UnsafeSetNil(ptr) + return + } + if c != '[' { + iter.ReportError("decode slice", "expect [ or n, but found "+string([]byte{c})) + return + } + c = iter.nextToken() + if c == ']' { + sliceType.UnsafeSet(ptr, sliceType.UnsafeMakeSlice(0, 0)) + return + } + iter.unreadByte() + sliceType.UnsafeGrow(ptr, 1) + elemPtr := sliceType.UnsafeGetIndex(ptr, 0) + decoder.elemDecoder.Decode(elemPtr, iter) + length := 1 + for c = iter.nextToken(); c == ','; c = iter.nextToken() { + idx := length + length += 1 + sliceType.UnsafeGrow(ptr, length) + elemPtr = sliceType.UnsafeGetIndex(ptr, idx) + decoder.elemDecoder.Decode(elemPtr, iter) + } + if c != ']' { + iter.ReportError("decode slice", "expect ], but found "+string([]byte{c})) + return + } +} diff --git a/vendor/github.com/json-iterator/go/reflect_struct_decoder.go 
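The sliceDecoder above grows the destination slice one element at a time with UnsafeGrow, decodes "null" to a nil slice, and decodes "[]" to an empty but non-nil slice. A short sketch of that behavior through the exported API (ConfigCompatibleWithStandardLibrary and Unmarshal are assumed from the package's public surface, not from this hunk):

package main

import (
	"fmt"

	jsoniter "github.com/json-iterator/go"
)

func main() {
	var nums []int
	// Behaves as a drop-in for encoding/json here: "[1, 2, 3]" fills the slice,
	// "null" would leave it nil, and "[]" would make it empty but non-nil.
	if err := jsoniter.ConfigCompatibleWithStandardLibrary.Unmarshal([]byte("[1, 2, 3]"), &nums); err != nil {
		panic(err)
	}
	fmt.Println(nums) // [1 2 3]
}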
b/vendor/github.com/json-iterator/go/reflect_struct_decoder.go new file mode 100644 index 0000000000..355d2d116b --- /dev/null +++ b/vendor/github.com/json-iterator/go/reflect_struct_decoder.go @@ -0,0 +1,1048 @@ +package jsoniter + +import ( + "fmt" + "io" + "strings" + "unsafe" + + "github.com/modern-go/reflect2" +) + +func decoderOfStruct(ctx *ctx, typ reflect2.Type) ValDecoder { + bindings := map[string]*Binding{} + structDescriptor := describeStruct(ctx, typ) + for _, binding := range structDescriptor.Fields { + for _, fromName := range binding.FromNames { + old := bindings[fromName] + if old == nil { + bindings[fromName] = binding + continue + } + ignoreOld, ignoreNew := resolveConflictBinding(ctx.frozenConfig, old, binding) + if ignoreOld { + delete(bindings, fromName) + } + if !ignoreNew { + bindings[fromName] = binding + } + } + } + fields := map[string]*structFieldDecoder{} + for k, binding := range bindings { + fields[k] = binding.Decoder.(*structFieldDecoder) + } + + if !ctx.caseSensitive() { + for k, binding := range bindings { + if _, found := fields[strings.ToLower(k)]; !found { + fields[strings.ToLower(k)] = binding.Decoder.(*structFieldDecoder) + } + } + } + + return createStructDecoder(ctx, typ, fields) +} + +func createStructDecoder(ctx *ctx, typ reflect2.Type, fields map[string]*structFieldDecoder) ValDecoder { + if ctx.disallowUnknownFields { + return &generalStructDecoder{typ: typ, fields: fields, disallowUnknownFields: true} + } + knownHash := map[int64]struct{}{ + 0: {}, + } + + switch len(fields) { + case 0: + return &skipObjectDecoder{typ} + case 1: + for fieldName, fieldDecoder := range fields { + fieldHash := calcHash(fieldName, ctx.caseSensitive()) + _, known := knownHash[fieldHash] + if known { + return &generalStructDecoder{typ, fields, false} + } + knownHash[fieldHash] = struct{}{} + return &oneFieldStructDecoder{typ, fieldHash, fieldDecoder} + } + case 2: + var fieldHash1 int64 + var fieldHash2 int64 + var fieldDecoder1 *structFieldDecoder + var fieldDecoder2 *structFieldDecoder + for fieldName, fieldDecoder := range fields { + fieldHash := calcHash(fieldName, ctx.caseSensitive()) + _, known := knownHash[fieldHash] + if known { + return &generalStructDecoder{typ, fields, false} + } + knownHash[fieldHash] = struct{}{} + if fieldHash1 == 0 { + fieldHash1 = fieldHash + fieldDecoder1 = fieldDecoder + } else { + fieldHash2 = fieldHash + fieldDecoder2 = fieldDecoder + } + } + return &twoFieldsStructDecoder{typ, fieldHash1, fieldDecoder1, fieldHash2, fieldDecoder2} + case 3: + var fieldName1 int64 + var fieldName2 int64 + var fieldName3 int64 + var fieldDecoder1 *structFieldDecoder + var fieldDecoder2 *structFieldDecoder + var fieldDecoder3 *structFieldDecoder + for fieldName, fieldDecoder := range fields { + fieldHash := calcHash(fieldName, ctx.caseSensitive()) + _, known := knownHash[fieldHash] + if known { + return &generalStructDecoder{typ, fields, false} + } + knownHash[fieldHash] = struct{}{} + if fieldName1 == 0 { + fieldName1 = fieldHash + fieldDecoder1 = fieldDecoder + } else if fieldName2 == 0 { + fieldName2 = fieldHash + fieldDecoder2 = fieldDecoder + } else { + fieldName3 = fieldHash + fieldDecoder3 = fieldDecoder + } + } + return &threeFieldsStructDecoder{typ, + fieldName1, fieldDecoder1, + fieldName2, fieldDecoder2, + fieldName3, fieldDecoder3} + case 4: + var fieldName1 int64 + var fieldName2 int64 + var fieldName3 int64 + var fieldName4 int64 + var fieldDecoder1 *structFieldDecoder + var fieldDecoder2 *structFieldDecoder + var fieldDecoder3 
*structFieldDecoder + var fieldDecoder4 *structFieldDecoder + for fieldName, fieldDecoder := range fields { + fieldHash := calcHash(fieldName, ctx.caseSensitive()) + _, known := knownHash[fieldHash] + if known { + return &generalStructDecoder{typ, fields, false} + } + knownHash[fieldHash] = struct{}{} + if fieldName1 == 0 { + fieldName1 = fieldHash + fieldDecoder1 = fieldDecoder + } else if fieldName2 == 0 { + fieldName2 = fieldHash + fieldDecoder2 = fieldDecoder + } else if fieldName3 == 0 { + fieldName3 = fieldHash + fieldDecoder3 = fieldDecoder + } else { + fieldName4 = fieldHash + fieldDecoder4 = fieldDecoder + } + } + return &fourFieldsStructDecoder{typ, + fieldName1, fieldDecoder1, + fieldName2, fieldDecoder2, + fieldName3, fieldDecoder3, + fieldName4, fieldDecoder4} + case 5: + var fieldName1 int64 + var fieldName2 int64 + var fieldName3 int64 + var fieldName4 int64 + var fieldName5 int64 + var fieldDecoder1 *structFieldDecoder + var fieldDecoder2 *structFieldDecoder + var fieldDecoder3 *structFieldDecoder + var fieldDecoder4 *structFieldDecoder + var fieldDecoder5 *structFieldDecoder + for fieldName, fieldDecoder := range fields { + fieldHash := calcHash(fieldName, ctx.caseSensitive()) + _, known := knownHash[fieldHash] + if known { + return &generalStructDecoder{typ, fields, false} + } + knownHash[fieldHash] = struct{}{} + if fieldName1 == 0 { + fieldName1 = fieldHash + fieldDecoder1 = fieldDecoder + } else if fieldName2 == 0 { + fieldName2 = fieldHash + fieldDecoder2 = fieldDecoder + } else if fieldName3 == 0 { + fieldName3 = fieldHash + fieldDecoder3 = fieldDecoder + } else if fieldName4 == 0 { + fieldName4 = fieldHash + fieldDecoder4 = fieldDecoder + } else { + fieldName5 = fieldHash + fieldDecoder5 = fieldDecoder + } + } + return &fiveFieldsStructDecoder{typ, + fieldName1, fieldDecoder1, + fieldName2, fieldDecoder2, + fieldName3, fieldDecoder3, + fieldName4, fieldDecoder4, + fieldName5, fieldDecoder5} + case 6: + var fieldName1 int64 + var fieldName2 int64 + var fieldName3 int64 + var fieldName4 int64 + var fieldName5 int64 + var fieldName6 int64 + var fieldDecoder1 *structFieldDecoder + var fieldDecoder2 *structFieldDecoder + var fieldDecoder3 *structFieldDecoder + var fieldDecoder4 *structFieldDecoder + var fieldDecoder5 *structFieldDecoder + var fieldDecoder6 *structFieldDecoder + for fieldName, fieldDecoder := range fields { + fieldHash := calcHash(fieldName, ctx.caseSensitive()) + _, known := knownHash[fieldHash] + if known { + return &generalStructDecoder{typ, fields, false} + } + knownHash[fieldHash] = struct{}{} + if fieldName1 == 0 { + fieldName1 = fieldHash + fieldDecoder1 = fieldDecoder + } else if fieldName2 == 0 { + fieldName2 = fieldHash + fieldDecoder2 = fieldDecoder + } else if fieldName3 == 0 { + fieldName3 = fieldHash + fieldDecoder3 = fieldDecoder + } else if fieldName4 == 0 { + fieldName4 = fieldHash + fieldDecoder4 = fieldDecoder + } else if fieldName5 == 0 { + fieldName5 = fieldHash + fieldDecoder5 = fieldDecoder + } else { + fieldName6 = fieldHash + fieldDecoder6 = fieldDecoder + } + } + return &sixFieldsStructDecoder{typ, + fieldName1, fieldDecoder1, + fieldName2, fieldDecoder2, + fieldName3, fieldDecoder3, + fieldName4, fieldDecoder4, + fieldName5, fieldDecoder5, + fieldName6, fieldDecoder6} + case 7: + var fieldName1 int64 + var fieldName2 int64 + var fieldName3 int64 + var fieldName4 int64 + var fieldName5 int64 + var fieldName6 int64 + var fieldName7 int64 + var fieldDecoder1 *structFieldDecoder + var fieldDecoder2 *structFieldDecoder + var 
fieldDecoder3 *structFieldDecoder + var fieldDecoder4 *structFieldDecoder + var fieldDecoder5 *structFieldDecoder + var fieldDecoder6 *structFieldDecoder + var fieldDecoder7 *structFieldDecoder + for fieldName, fieldDecoder := range fields { + fieldHash := calcHash(fieldName, ctx.caseSensitive()) + _, known := knownHash[fieldHash] + if known { + return &generalStructDecoder{typ, fields, false} + } + knownHash[fieldHash] = struct{}{} + if fieldName1 == 0 { + fieldName1 = fieldHash + fieldDecoder1 = fieldDecoder + } else if fieldName2 == 0 { + fieldName2 = fieldHash + fieldDecoder2 = fieldDecoder + } else if fieldName3 == 0 { + fieldName3 = fieldHash + fieldDecoder3 = fieldDecoder + } else if fieldName4 == 0 { + fieldName4 = fieldHash + fieldDecoder4 = fieldDecoder + } else if fieldName5 == 0 { + fieldName5 = fieldHash + fieldDecoder5 = fieldDecoder + } else if fieldName6 == 0 { + fieldName6 = fieldHash + fieldDecoder6 = fieldDecoder + } else { + fieldName7 = fieldHash + fieldDecoder7 = fieldDecoder + } + } + return &sevenFieldsStructDecoder{typ, + fieldName1, fieldDecoder1, + fieldName2, fieldDecoder2, + fieldName3, fieldDecoder3, + fieldName4, fieldDecoder4, + fieldName5, fieldDecoder5, + fieldName6, fieldDecoder6, + fieldName7, fieldDecoder7} + case 8: + var fieldName1 int64 + var fieldName2 int64 + var fieldName3 int64 + var fieldName4 int64 + var fieldName5 int64 + var fieldName6 int64 + var fieldName7 int64 + var fieldName8 int64 + var fieldDecoder1 *structFieldDecoder + var fieldDecoder2 *structFieldDecoder + var fieldDecoder3 *structFieldDecoder + var fieldDecoder4 *structFieldDecoder + var fieldDecoder5 *structFieldDecoder + var fieldDecoder6 *structFieldDecoder + var fieldDecoder7 *structFieldDecoder + var fieldDecoder8 *structFieldDecoder + for fieldName, fieldDecoder := range fields { + fieldHash := calcHash(fieldName, ctx.caseSensitive()) + _, known := knownHash[fieldHash] + if known { + return &generalStructDecoder{typ, fields, false} + } + knownHash[fieldHash] = struct{}{} + if fieldName1 == 0 { + fieldName1 = fieldHash + fieldDecoder1 = fieldDecoder + } else if fieldName2 == 0 { + fieldName2 = fieldHash + fieldDecoder2 = fieldDecoder + } else if fieldName3 == 0 { + fieldName3 = fieldHash + fieldDecoder3 = fieldDecoder + } else if fieldName4 == 0 { + fieldName4 = fieldHash + fieldDecoder4 = fieldDecoder + } else if fieldName5 == 0 { + fieldName5 = fieldHash + fieldDecoder5 = fieldDecoder + } else if fieldName6 == 0 { + fieldName6 = fieldHash + fieldDecoder6 = fieldDecoder + } else if fieldName7 == 0 { + fieldName7 = fieldHash + fieldDecoder7 = fieldDecoder + } else { + fieldName8 = fieldHash + fieldDecoder8 = fieldDecoder + } + } + return &eightFieldsStructDecoder{typ, + fieldName1, fieldDecoder1, + fieldName2, fieldDecoder2, + fieldName3, fieldDecoder3, + fieldName4, fieldDecoder4, + fieldName5, fieldDecoder5, + fieldName6, fieldDecoder6, + fieldName7, fieldDecoder7, + fieldName8, fieldDecoder8} + case 9: + var fieldName1 int64 + var fieldName2 int64 + var fieldName3 int64 + var fieldName4 int64 + var fieldName5 int64 + var fieldName6 int64 + var fieldName7 int64 + var fieldName8 int64 + var fieldName9 int64 + var fieldDecoder1 *structFieldDecoder + var fieldDecoder2 *structFieldDecoder + var fieldDecoder3 *structFieldDecoder + var fieldDecoder4 *structFieldDecoder + var fieldDecoder5 *structFieldDecoder + var fieldDecoder6 *structFieldDecoder + var fieldDecoder7 *structFieldDecoder + var fieldDecoder8 *structFieldDecoder + var fieldDecoder9 *structFieldDecoder + for 
fieldName, fieldDecoder := range fields { + fieldHash := calcHash(fieldName, ctx.caseSensitive()) + _, known := knownHash[fieldHash] + if known { + return &generalStructDecoder{typ, fields, false} + } + knownHash[fieldHash] = struct{}{} + if fieldName1 == 0 { + fieldName1 = fieldHash + fieldDecoder1 = fieldDecoder + } else if fieldName2 == 0 { + fieldName2 = fieldHash + fieldDecoder2 = fieldDecoder + } else if fieldName3 == 0 { + fieldName3 = fieldHash + fieldDecoder3 = fieldDecoder + } else if fieldName4 == 0 { + fieldName4 = fieldHash + fieldDecoder4 = fieldDecoder + } else if fieldName5 == 0 { + fieldName5 = fieldHash + fieldDecoder5 = fieldDecoder + } else if fieldName6 == 0 { + fieldName6 = fieldHash + fieldDecoder6 = fieldDecoder + } else if fieldName7 == 0 { + fieldName7 = fieldHash + fieldDecoder7 = fieldDecoder + } else if fieldName8 == 0 { + fieldName8 = fieldHash + fieldDecoder8 = fieldDecoder + } else { + fieldName9 = fieldHash + fieldDecoder9 = fieldDecoder + } + } + return &nineFieldsStructDecoder{typ, + fieldName1, fieldDecoder1, + fieldName2, fieldDecoder2, + fieldName3, fieldDecoder3, + fieldName4, fieldDecoder4, + fieldName5, fieldDecoder5, + fieldName6, fieldDecoder6, + fieldName7, fieldDecoder7, + fieldName8, fieldDecoder8, + fieldName9, fieldDecoder9} + case 10: + var fieldName1 int64 + var fieldName2 int64 + var fieldName3 int64 + var fieldName4 int64 + var fieldName5 int64 + var fieldName6 int64 + var fieldName7 int64 + var fieldName8 int64 + var fieldName9 int64 + var fieldName10 int64 + var fieldDecoder1 *structFieldDecoder + var fieldDecoder2 *structFieldDecoder + var fieldDecoder3 *structFieldDecoder + var fieldDecoder4 *structFieldDecoder + var fieldDecoder5 *structFieldDecoder + var fieldDecoder6 *structFieldDecoder + var fieldDecoder7 *structFieldDecoder + var fieldDecoder8 *structFieldDecoder + var fieldDecoder9 *structFieldDecoder + var fieldDecoder10 *structFieldDecoder + for fieldName, fieldDecoder := range fields { + fieldHash := calcHash(fieldName, ctx.caseSensitive()) + _, known := knownHash[fieldHash] + if known { + return &generalStructDecoder{typ, fields, false} + } + knownHash[fieldHash] = struct{}{} + if fieldName1 == 0 { + fieldName1 = fieldHash + fieldDecoder1 = fieldDecoder + } else if fieldName2 == 0 { + fieldName2 = fieldHash + fieldDecoder2 = fieldDecoder + } else if fieldName3 == 0 { + fieldName3 = fieldHash + fieldDecoder3 = fieldDecoder + } else if fieldName4 == 0 { + fieldName4 = fieldHash + fieldDecoder4 = fieldDecoder + } else if fieldName5 == 0 { + fieldName5 = fieldHash + fieldDecoder5 = fieldDecoder + } else if fieldName6 == 0 { + fieldName6 = fieldHash + fieldDecoder6 = fieldDecoder + } else if fieldName7 == 0 { + fieldName7 = fieldHash + fieldDecoder7 = fieldDecoder + } else if fieldName8 == 0 { + fieldName8 = fieldHash + fieldDecoder8 = fieldDecoder + } else if fieldName9 == 0 { + fieldName9 = fieldHash + fieldDecoder9 = fieldDecoder + } else { + fieldName10 = fieldHash + fieldDecoder10 = fieldDecoder + } + } + return &tenFieldsStructDecoder{typ, + fieldName1, fieldDecoder1, + fieldName2, fieldDecoder2, + fieldName3, fieldDecoder3, + fieldName4, fieldDecoder4, + fieldName5, fieldDecoder5, + fieldName6, fieldDecoder6, + fieldName7, fieldDecoder7, + fieldName8, fieldDecoder8, + fieldName9, fieldDecoder9, + fieldName10, fieldDecoder10} + } + return &generalStructDecoder{typ, fields, false} +} + +type generalStructDecoder struct { + typ reflect2.Type + fields map[string]*structFieldDecoder + disallowUnknownFields bool +} + +func 
(decoder *generalStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) { + if !iter.readObjectStart() { + return + } + var c byte + for c = ','; c == ','; c = iter.nextToken() { + decoder.decodeOneField(ptr, iter) + } + if iter.Error != nil && iter.Error != io.EOF { + iter.Error = fmt.Errorf("%v.%s", decoder.typ, iter.Error.Error()) + } + if c != '}' { + iter.ReportError("struct Decode", `expect }, but found `+string([]byte{c})) + } +} + +func (decoder *generalStructDecoder) decodeOneField(ptr unsafe.Pointer, iter *Iterator) { + var field string + var fieldDecoder *structFieldDecoder + if iter.cfg.objectFieldMustBeSimpleString { + fieldBytes := iter.ReadStringAsSlice() + field = *(*string)(unsafe.Pointer(&fieldBytes)) + fieldDecoder = decoder.fields[field] + if fieldDecoder == nil && !iter.cfg.caseSensitive { + fieldDecoder = decoder.fields[strings.ToLower(field)] + } + } else { + field = iter.ReadString() + fieldDecoder = decoder.fields[field] + if fieldDecoder == nil && !iter.cfg.caseSensitive { + fieldDecoder = decoder.fields[strings.ToLower(field)] + } + } + if fieldDecoder == nil { + msg := "found unknown field: " + field + if decoder.disallowUnknownFields { + iter.ReportError("ReadObject", msg) + } + c := iter.nextToken() + if c != ':' { + iter.ReportError("ReadObject", "expect : after object field, but found "+string([]byte{c})) + } + iter.Skip() + return + } + c := iter.nextToken() + if c != ':' { + iter.ReportError("ReadObject", "expect : after object field, but found "+string([]byte{c})) + } + fieldDecoder.Decode(ptr, iter) +} + +type skipObjectDecoder struct { + typ reflect2.Type +} + +func (decoder *skipObjectDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) { + valueType := iter.WhatIsNext() + if valueType != ObjectValue && valueType != NilValue { + iter.ReportError("skipObjectDecoder", "expect object or null") + return + } + iter.Skip() +} + +type oneFieldStructDecoder struct { + typ reflect2.Type + fieldHash int64 + fieldDecoder *structFieldDecoder +} + +func (decoder *oneFieldStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) { + if !iter.readObjectStart() { + return + } + for { + if iter.readFieldHash() == decoder.fieldHash { + decoder.fieldDecoder.Decode(ptr, iter) + } else { + iter.Skip() + } + if iter.isObjectEnd() { + break + } + } + if iter.Error != nil && iter.Error != io.EOF { + iter.Error = fmt.Errorf("%v.%s", decoder.typ, iter.Error.Error()) + } +} + +type twoFieldsStructDecoder struct { + typ reflect2.Type + fieldHash1 int64 + fieldDecoder1 *structFieldDecoder + fieldHash2 int64 + fieldDecoder2 *structFieldDecoder +} + +func (decoder *twoFieldsStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) { + if !iter.readObjectStart() { + return + } + for { + switch iter.readFieldHash() { + case decoder.fieldHash1: + decoder.fieldDecoder1.Decode(ptr, iter) + case decoder.fieldHash2: + decoder.fieldDecoder2.Decode(ptr, iter) + default: + iter.Skip() + } + if iter.isObjectEnd() { + break + } + } + if iter.Error != nil && iter.Error != io.EOF { + iter.Error = fmt.Errorf("%v.%s", decoder.typ, iter.Error.Error()) + } +} + +type threeFieldsStructDecoder struct { + typ reflect2.Type + fieldHash1 int64 + fieldDecoder1 *structFieldDecoder + fieldHash2 int64 + fieldDecoder2 *structFieldDecoder + fieldHash3 int64 + fieldDecoder3 *structFieldDecoder +} + +func (decoder *threeFieldsStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) { + if !iter.readObjectStart() { + return + } + for { + switch iter.readFieldHash() { + case decoder.fieldHash1: + 
decoder.fieldDecoder1.Decode(ptr, iter) + case decoder.fieldHash2: + decoder.fieldDecoder2.Decode(ptr, iter) + case decoder.fieldHash3: + decoder.fieldDecoder3.Decode(ptr, iter) + default: + iter.Skip() + } + if iter.isObjectEnd() { + break + } + } + if iter.Error != nil && iter.Error != io.EOF { + iter.Error = fmt.Errorf("%v.%s", decoder.typ, iter.Error.Error()) + } +} + +type fourFieldsStructDecoder struct { + typ reflect2.Type + fieldHash1 int64 + fieldDecoder1 *structFieldDecoder + fieldHash2 int64 + fieldDecoder2 *structFieldDecoder + fieldHash3 int64 + fieldDecoder3 *structFieldDecoder + fieldHash4 int64 + fieldDecoder4 *structFieldDecoder +} + +func (decoder *fourFieldsStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) { + if !iter.readObjectStart() { + return + } + for { + switch iter.readFieldHash() { + case decoder.fieldHash1: + decoder.fieldDecoder1.Decode(ptr, iter) + case decoder.fieldHash2: + decoder.fieldDecoder2.Decode(ptr, iter) + case decoder.fieldHash3: + decoder.fieldDecoder3.Decode(ptr, iter) + case decoder.fieldHash4: + decoder.fieldDecoder4.Decode(ptr, iter) + default: + iter.Skip() + } + if iter.isObjectEnd() { + break + } + } + if iter.Error != nil && iter.Error != io.EOF { + iter.Error = fmt.Errorf("%v.%s", decoder.typ, iter.Error.Error()) + } +} + +type fiveFieldsStructDecoder struct { + typ reflect2.Type + fieldHash1 int64 + fieldDecoder1 *structFieldDecoder + fieldHash2 int64 + fieldDecoder2 *structFieldDecoder + fieldHash3 int64 + fieldDecoder3 *structFieldDecoder + fieldHash4 int64 + fieldDecoder4 *structFieldDecoder + fieldHash5 int64 + fieldDecoder5 *structFieldDecoder +} + +func (decoder *fiveFieldsStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) { + if !iter.readObjectStart() { + return + } + for { + switch iter.readFieldHash() { + case decoder.fieldHash1: + decoder.fieldDecoder1.Decode(ptr, iter) + case decoder.fieldHash2: + decoder.fieldDecoder2.Decode(ptr, iter) + case decoder.fieldHash3: + decoder.fieldDecoder3.Decode(ptr, iter) + case decoder.fieldHash4: + decoder.fieldDecoder4.Decode(ptr, iter) + case decoder.fieldHash5: + decoder.fieldDecoder5.Decode(ptr, iter) + default: + iter.Skip() + } + if iter.isObjectEnd() { + break + } + } + if iter.Error != nil && iter.Error != io.EOF { + iter.Error = fmt.Errorf("%v.%s", decoder.typ, iter.Error.Error()) + } +} + +type sixFieldsStructDecoder struct { + typ reflect2.Type + fieldHash1 int64 + fieldDecoder1 *structFieldDecoder + fieldHash2 int64 + fieldDecoder2 *structFieldDecoder + fieldHash3 int64 + fieldDecoder3 *structFieldDecoder + fieldHash4 int64 + fieldDecoder4 *structFieldDecoder + fieldHash5 int64 + fieldDecoder5 *structFieldDecoder + fieldHash6 int64 + fieldDecoder6 *structFieldDecoder +} + +func (decoder *sixFieldsStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) { + if !iter.readObjectStart() { + return + } + for { + switch iter.readFieldHash() { + case decoder.fieldHash1: + decoder.fieldDecoder1.Decode(ptr, iter) + case decoder.fieldHash2: + decoder.fieldDecoder2.Decode(ptr, iter) + case decoder.fieldHash3: + decoder.fieldDecoder3.Decode(ptr, iter) + case decoder.fieldHash4: + decoder.fieldDecoder4.Decode(ptr, iter) + case decoder.fieldHash5: + decoder.fieldDecoder5.Decode(ptr, iter) + case decoder.fieldHash6: + decoder.fieldDecoder6.Decode(ptr, iter) + default: + iter.Skip() + } + if iter.isObjectEnd() { + break + } + } + if iter.Error != nil && iter.Error != io.EOF { + iter.Error = fmt.Errorf("%v.%s", decoder.typ, iter.Error.Error()) + } +} + +type 
sevenFieldsStructDecoder struct { + typ reflect2.Type + fieldHash1 int64 + fieldDecoder1 *structFieldDecoder + fieldHash2 int64 + fieldDecoder2 *structFieldDecoder + fieldHash3 int64 + fieldDecoder3 *structFieldDecoder + fieldHash4 int64 + fieldDecoder4 *structFieldDecoder + fieldHash5 int64 + fieldDecoder5 *structFieldDecoder + fieldHash6 int64 + fieldDecoder6 *structFieldDecoder + fieldHash7 int64 + fieldDecoder7 *structFieldDecoder +} + +func (decoder *sevenFieldsStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) { + if !iter.readObjectStart() { + return + } + for { + switch iter.readFieldHash() { + case decoder.fieldHash1: + decoder.fieldDecoder1.Decode(ptr, iter) + case decoder.fieldHash2: + decoder.fieldDecoder2.Decode(ptr, iter) + case decoder.fieldHash3: + decoder.fieldDecoder3.Decode(ptr, iter) + case decoder.fieldHash4: + decoder.fieldDecoder4.Decode(ptr, iter) + case decoder.fieldHash5: + decoder.fieldDecoder5.Decode(ptr, iter) + case decoder.fieldHash6: + decoder.fieldDecoder6.Decode(ptr, iter) + case decoder.fieldHash7: + decoder.fieldDecoder7.Decode(ptr, iter) + default: + iter.Skip() + } + if iter.isObjectEnd() { + break + } + } + if iter.Error != nil && iter.Error != io.EOF { + iter.Error = fmt.Errorf("%v.%s", decoder.typ, iter.Error.Error()) + } +} + +type eightFieldsStructDecoder struct { + typ reflect2.Type + fieldHash1 int64 + fieldDecoder1 *structFieldDecoder + fieldHash2 int64 + fieldDecoder2 *structFieldDecoder + fieldHash3 int64 + fieldDecoder3 *structFieldDecoder + fieldHash4 int64 + fieldDecoder4 *structFieldDecoder + fieldHash5 int64 + fieldDecoder5 *structFieldDecoder + fieldHash6 int64 + fieldDecoder6 *structFieldDecoder + fieldHash7 int64 + fieldDecoder7 *structFieldDecoder + fieldHash8 int64 + fieldDecoder8 *structFieldDecoder +} + +func (decoder *eightFieldsStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) { + if !iter.readObjectStart() { + return + } + for { + switch iter.readFieldHash() { + case decoder.fieldHash1: + decoder.fieldDecoder1.Decode(ptr, iter) + case decoder.fieldHash2: + decoder.fieldDecoder2.Decode(ptr, iter) + case decoder.fieldHash3: + decoder.fieldDecoder3.Decode(ptr, iter) + case decoder.fieldHash4: + decoder.fieldDecoder4.Decode(ptr, iter) + case decoder.fieldHash5: + decoder.fieldDecoder5.Decode(ptr, iter) + case decoder.fieldHash6: + decoder.fieldDecoder6.Decode(ptr, iter) + case decoder.fieldHash7: + decoder.fieldDecoder7.Decode(ptr, iter) + case decoder.fieldHash8: + decoder.fieldDecoder8.Decode(ptr, iter) + default: + iter.Skip() + } + if iter.isObjectEnd() { + break + } + } + if iter.Error != nil && iter.Error != io.EOF { + iter.Error = fmt.Errorf("%v.%s", decoder.typ, iter.Error.Error()) + } +} + +type nineFieldsStructDecoder struct { + typ reflect2.Type + fieldHash1 int64 + fieldDecoder1 *structFieldDecoder + fieldHash2 int64 + fieldDecoder2 *structFieldDecoder + fieldHash3 int64 + fieldDecoder3 *structFieldDecoder + fieldHash4 int64 + fieldDecoder4 *structFieldDecoder + fieldHash5 int64 + fieldDecoder5 *structFieldDecoder + fieldHash6 int64 + fieldDecoder6 *structFieldDecoder + fieldHash7 int64 + fieldDecoder7 *structFieldDecoder + fieldHash8 int64 + fieldDecoder8 *structFieldDecoder + fieldHash9 int64 + fieldDecoder9 *structFieldDecoder +} + +func (decoder *nineFieldsStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) { + if !iter.readObjectStart() { + return + } + for { + switch iter.readFieldHash() { + case decoder.fieldHash1: + decoder.fieldDecoder1.Decode(ptr, iter) + case decoder.fieldHash2: + 
decoder.fieldDecoder2.Decode(ptr, iter) + case decoder.fieldHash3: + decoder.fieldDecoder3.Decode(ptr, iter) + case decoder.fieldHash4: + decoder.fieldDecoder4.Decode(ptr, iter) + case decoder.fieldHash5: + decoder.fieldDecoder5.Decode(ptr, iter) + case decoder.fieldHash6: + decoder.fieldDecoder6.Decode(ptr, iter) + case decoder.fieldHash7: + decoder.fieldDecoder7.Decode(ptr, iter) + case decoder.fieldHash8: + decoder.fieldDecoder8.Decode(ptr, iter) + case decoder.fieldHash9: + decoder.fieldDecoder9.Decode(ptr, iter) + default: + iter.Skip() + } + if iter.isObjectEnd() { + break + } + } + if iter.Error != nil && iter.Error != io.EOF { + iter.Error = fmt.Errorf("%v.%s", decoder.typ, iter.Error.Error()) + } +} + +type tenFieldsStructDecoder struct { + typ reflect2.Type + fieldHash1 int64 + fieldDecoder1 *structFieldDecoder + fieldHash2 int64 + fieldDecoder2 *structFieldDecoder + fieldHash3 int64 + fieldDecoder3 *structFieldDecoder + fieldHash4 int64 + fieldDecoder4 *structFieldDecoder + fieldHash5 int64 + fieldDecoder5 *structFieldDecoder + fieldHash6 int64 + fieldDecoder6 *structFieldDecoder + fieldHash7 int64 + fieldDecoder7 *structFieldDecoder + fieldHash8 int64 + fieldDecoder8 *structFieldDecoder + fieldHash9 int64 + fieldDecoder9 *structFieldDecoder + fieldHash10 int64 + fieldDecoder10 *structFieldDecoder +} + +func (decoder *tenFieldsStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) { + if !iter.readObjectStart() { + return + } + for { + switch iter.readFieldHash() { + case decoder.fieldHash1: + decoder.fieldDecoder1.Decode(ptr, iter) + case decoder.fieldHash2: + decoder.fieldDecoder2.Decode(ptr, iter) + case decoder.fieldHash3: + decoder.fieldDecoder3.Decode(ptr, iter) + case decoder.fieldHash4: + decoder.fieldDecoder4.Decode(ptr, iter) + case decoder.fieldHash5: + decoder.fieldDecoder5.Decode(ptr, iter) + case decoder.fieldHash6: + decoder.fieldDecoder6.Decode(ptr, iter) + case decoder.fieldHash7: + decoder.fieldDecoder7.Decode(ptr, iter) + case decoder.fieldHash8: + decoder.fieldDecoder8.Decode(ptr, iter) + case decoder.fieldHash9: + decoder.fieldDecoder9.Decode(ptr, iter) + case decoder.fieldHash10: + decoder.fieldDecoder10.Decode(ptr, iter) + default: + iter.Skip() + } + if iter.isObjectEnd() { + break + } + } + if iter.Error != nil && iter.Error != io.EOF { + iter.Error = fmt.Errorf("%v.%s", decoder.typ, iter.Error.Error()) + } +} + +type structFieldDecoder struct { + field reflect2.StructField + fieldDecoder ValDecoder +} + +func (decoder *structFieldDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) { + fieldPtr := decoder.field.UnsafeGet(ptr) + decoder.fieldDecoder.Decode(fieldPtr, iter) + if iter.Error != nil && iter.Error != io.EOF { + iter.Error = fmt.Errorf("%s: %s", decoder.field.Name(), iter.Error.Error()) + } +} + +type stringModeStringDecoder struct { + elemDecoder ValDecoder + cfg *frozenConfig +} + +func (decoder *stringModeStringDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) { + decoder.elemDecoder.Decode(ptr, iter) + str := *((*string)(ptr)) + tempIter := decoder.cfg.BorrowIterator([]byte(str)) + defer decoder.cfg.ReturnIterator(tempIter) + *((*string)(ptr)) = tempIter.ReadString() +} + +type stringModeNumberDecoder struct { + elemDecoder ValDecoder +} + +func (decoder *stringModeNumberDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) { + c := iter.nextToken() + if c != '"' { + iter.ReportError("stringModeNumberDecoder", `expect ", but found `+string([]byte{c})) + return + } + decoder.elemDecoder.Decode(ptr, iter) + if iter.Error != nil { + return + 
} + c = iter.readByte() + if c != '"' { + iter.ReportError("stringModeNumberDecoder", `expect ", but found `+string([]byte{c})) + return + } +} diff --git a/vendor/github.com/json-iterator/go/reflect_struct_encoder.go b/vendor/github.com/json-iterator/go/reflect_struct_encoder.go new file mode 100644 index 0000000000..d0759cf641 --- /dev/null +++ b/vendor/github.com/json-iterator/go/reflect_struct_encoder.go @@ -0,0 +1,210 @@ +package jsoniter + +import ( + "fmt" + "github.com/modern-go/reflect2" + "io" + "reflect" + "unsafe" +) + +func encoderOfStruct(ctx *ctx, typ reflect2.Type) ValEncoder { + type bindingTo struct { + binding *Binding + toName string + ignored bool + } + orderedBindings := []*bindingTo{} + structDescriptor := describeStruct(ctx, typ) + for _, binding := range structDescriptor.Fields { + for _, toName := range binding.ToNames { + new := &bindingTo{ + binding: binding, + toName: toName, + } + for _, old := range orderedBindings { + if old.toName != toName { + continue + } + old.ignored, new.ignored = resolveConflictBinding(ctx.frozenConfig, old.binding, new.binding) + } + orderedBindings = append(orderedBindings, new) + } + } + if len(orderedBindings) == 0 { + return &emptyStructEncoder{} + } + finalOrderedFields := []structFieldTo{} + for _, bindingTo := range orderedBindings { + if !bindingTo.ignored { + finalOrderedFields = append(finalOrderedFields, structFieldTo{ + encoder: bindingTo.binding.Encoder.(*structFieldEncoder), + toName: bindingTo.toName, + }) + } + } + return &structEncoder{typ, finalOrderedFields} +} + +func createCheckIsEmpty(ctx *ctx, typ reflect2.Type) checkIsEmpty { + encoder := createEncoderOfNative(ctx, typ) + if encoder != nil { + return encoder + } + kind := typ.Kind() + switch kind { + case reflect.Interface: + return &dynamicEncoder{typ} + case reflect.Struct: + return &structEncoder{typ: typ} + case reflect.Array: + return &arrayEncoder{} + case reflect.Slice: + return &sliceEncoder{} + case reflect.Map: + return encoderOfMap(ctx, typ) + case reflect.Ptr: + return &OptionalEncoder{} + default: + return &lazyErrorEncoder{err: fmt.Errorf("unsupported type: %v", typ)} + } +} + +func resolveConflictBinding(cfg *frozenConfig, old, new *Binding) (ignoreOld, ignoreNew bool) { + newTagged := new.Field.Tag().Get(cfg.getTagKey()) != "" + oldTagged := old.Field.Tag().Get(cfg.getTagKey()) != "" + if newTagged { + if oldTagged { + if len(old.levels) > len(new.levels) { + return true, false + } else if len(new.levels) > len(old.levels) { + return false, true + } else { + return true, true + } + } else { + return true, false + } + } else { + if oldTagged { + return true, false + } + if len(old.levels) > len(new.levels) { + return true, false + } else if len(new.levels) > len(old.levels) { + return false, true + } else { + return true, true + } + } +} + +type structFieldEncoder struct { + field reflect2.StructField + fieldEncoder ValEncoder + omitempty bool +} + +func (encoder *structFieldEncoder) Encode(ptr unsafe.Pointer, stream *Stream) { + fieldPtr := encoder.field.UnsafeGet(ptr) + encoder.fieldEncoder.Encode(fieldPtr, stream) + if stream.Error != nil && stream.Error != io.EOF { + stream.Error = fmt.Errorf("%s: %s", encoder.field.Name(), stream.Error.Error()) + } +} + +func (encoder *structFieldEncoder) IsEmpty(ptr unsafe.Pointer) bool { + fieldPtr := encoder.field.UnsafeGet(ptr) + return encoder.fieldEncoder.IsEmpty(fieldPtr) +} + +func (encoder *structFieldEncoder) IsEmbeddedPtrNil(ptr unsafe.Pointer) bool { + isEmbeddedPtrNil, converted := 
encoder.fieldEncoder.(IsEmbeddedPtrNil) + if !converted { + return false + } + fieldPtr := encoder.field.UnsafeGet(ptr) + return isEmbeddedPtrNil.IsEmbeddedPtrNil(fieldPtr) +} + +type IsEmbeddedPtrNil interface { + IsEmbeddedPtrNil(ptr unsafe.Pointer) bool +} + +type structEncoder struct { + typ reflect2.Type + fields []structFieldTo +} + +type structFieldTo struct { + encoder *structFieldEncoder + toName string +} + +func (encoder *structEncoder) Encode(ptr unsafe.Pointer, stream *Stream) { + stream.WriteObjectStart() + isNotFirst := false + for _, field := range encoder.fields { + if field.encoder.omitempty && field.encoder.IsEmpty(ptr) { + continue + } + if field.encoder.IsEmbeddedPtrNil(ptr) { + continue + } + if isNotFirst { + stream.WriteMore() + } + stream.WriteObjectField(field.toName) + field.encoder.Encode(ptr, stream) + isNotFirst = true + } + stream.WriteObjectEnd() + if stream.Error != nil && stream.Error != io.EOF { + stream.Error = fmt.Errorf("%v.%s", encoder.typ, stream.Error.Error()) + } +} + +func (encoder *structEncoder) IsEmpty(ptr unsafe.Pointer) bool { + return false +} + +type emptyStructEncoder struct { +} + +func (encoder *emptyStructEncoder) Encode(ptr unsafe.Pointer, stream *Stream) { + stream.WriteEmptyObject() +} + +func (encoder *emptyStructEncoder) IsEmpty(ptr unsafe.Pointer) bool { + return false +} + +type stringModeNumberEncoder struct { + elemEncoder ValEncoder +} + +func (encoder *stringModeNumberEncoder) Encode(ptr unsafe.Pointer, stream *Stream) { + stream.writeByte('"') + encoder.elemEncoder.Encode(ptr, stream) + stream.writeByte('"') +} + +func (encoder *stringModeNumberEncoder) IsEmpty(ptr unsafe.Pointer) bool { + return encoder.elemEncoder.IsEmpty(ptr) +} + +type stringModeStringEncoder struct { + elemEncoder ValEncoder + cfg *frozenConfig +} + +func (encoder *stringModeStringEncoder) Encode(ptr unsafe.Pointer, stream *Stream) { + tempStream := encoder.cfg.BorrowStream(nil) + defer encoder.cfg.ReturnStream(tempStream) + encoder.elemEncoder.Encode(ptr, tempStream) + stream.WriteString(string(tempStream.Buffer())) +} + +func (encoder *stringModeStringEncoder) IsEmpty(ptr unsafe.Pointer) bool { + return encoder.elemEncoder.IsEmpty(ptr) +} diff --git a/vendor/github.com/json-iterator/go/stream.go b/vendor/github.com/json-iterator/go/stream.go new file mode 100644 index 0000000000..17662fdedc --- /dev/null +++ b/vendor/github.com/json-iterator/go/stream.go @@ -0,0 +1,211 @@ +package jsoniter + +import ( + "io" +) + +// stream is a io.Writer like object, with JSON specific write functions. +// Error is not returned as return value, but stored as Error member on this stream instance. +type Stream struct { + cfg *frozenConfig + out io.Writer + buf []byte + Error error + indention int + Attachment interface{} // open for customized encoder +} + +// NewStream create new stream instance. +// cfg can be jsoniter.ConfigDefault. +// out can be nil if write to internal buffer. +// bufSize is the initial size for the internal buffer in bytes. 
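// An illustrative use (ConfigDefault and Stream.WriteVal are defined elsewhere in this package):
//
//	stream := NewStream(ConfigDefault, os.Stdout, 512)
//	stream.WriteVal(map[string]int{"answer": 42})
//	stream.Flush()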
+func NewStream(cfg API, out io.Writer, bufSize int) *Stream { + return &Stream{ + cfg: cfg.(*frozenConfig), + out: out, + buf: make([]byte, 0, bufSize), + Error: nil, + indention: 0, + } +} + +// Pool returns a pool can provide more stream with same configuration +func (stream *Stream) Pool() StreamPool { + return stream.cfg +} + +// Reset reuse this stream instance by assign a new writer +func (stream *Stream) Reset(out io.Writer) { + stream.out = out + stream.buf = stream.buf[:0] +} + +// Available returns how many bytes are unused in the buffer. +func (stream *Stream) Available() int { + return cap(stream.buf) - len(stream.buf) +} + +// Buffered returns the number of bytes that have been written into the current buffer. +func (stream *Stream) Buffered() int { + return len(stream.buf) +} + +// Buffer if writer is nil, use this method to take the result +func (stream *Stream) Buffer() []byte { + return stream.buf +} + +// SetBuffer allows to append to the internal buffer directly +func (stream *Stream) SetBuffer(buf []byte) { + stream.buf = buf +} + +// Write writes the contents of p into the buffer. +// It returns the number of bytes written. +// If nn < len(p), it also returns an error explaining +// why the write is short. +func (stream *Stream) Write(p []byte) (nn int, err error) { + stream.buf = append(stream.buf, p...) + if stream.out != nil { + nn, err = stream.out.Write(stream.buf) + stream.buf = stream.buf[nn:] + return + } + return len(p), nil +} + +// WriteByte writes a single byte. +func (stream *Stream) writeByte(c byte) { + stream.buf = append(stream.buf, c) +} + +func (stream *Stream) writeTwoBytes(c1 byte, c2 byte) { + stream.buf = append(stream.buf, c1, c2) +} + +func (stream *Stream) writeThreeBytes(c1 byte, c2 byte, c3 byte) { + stream.buf = append(stream.buf, c1, c2, c3) +} + +func (stream *Stream) writeFourBytes(c1 byte, c2 byte, c3 byte, c4 byte) { + stream.buf = append(stream.buf, c1, c2, c3, c4) +} + +func (stream *Stream) writeFiveBytes(c1 byte, c2 byte, c3 byte, c4 byte, c5 byte) { + stream.buf = append(stream.buf, c1, c2, c3, c4, c5) +} + +// Flush writes any buffered data to the underlying io.Writer. +func (stream *Stream) Flush() error { + if stream.out == nil { + return nil + } + if stream.Error != nil { + return stream.Error + } + n, err := stream.out.Write(stream.buf) + if err != nil { + if stream.Error == nil { + stream.Error = err + } + return err + } + stream.buf = stream.buf[n:] + return nil +} + +// WriteRaw write string out without quotes, just like []byte +func (stream *Stream) WriteRaw(s string) { + stream.buf = append(stream.buf, s...) 
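+	// The bytes of s are appended verbatim: no quoting, escaping, or
+	// validation is performed, so the caller must supply valid JSON.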
+} + +// WriteNil write null to stream +func (stream *Stream) WriteNil() { + stream.writeFourBytes('n', 'u', 'l', 'l') +} + +// WriteTrue write true to stream +func (stream *Stream) WriteTrue() { + stream.writeFourBytes('t', 'r', 'u', 'e') +} + +// WriteFalse write false to stream +func (stream *Stream) WriteFalse() { + stream.writeFiveBytes('f', 'a', 'l', 's', 'e') +} + +// WriteBool write true or false into stream +func (stream *Stream) WriteBool(val bool) { + if val { + stream.WriteTrue() + } else { + stream.WriteFalse() + } +} + +// WriteObjectStart write { with possible indention +func (stream *Stream) WriteObjectStart() { + stream.indention += stream.cfg.indentionStep + stream.writeByte('{') + stream.writeIndention(0) +} + +// WriteObjectField write "field": with possible indention +func (stream *Stream) WriteObjectField(field string) { + stream.WriteString(field) + if stream.indention > 0 { + stream.writeTwoBytes(':', ' ') + } else { + stream.writeByte(':') + } +} + +// WriteObjectEnd write } with possible indention +func (stream *Stream) WriteObjectEnd() { + stream.writeIndention(stream.cfg.indentionStep) + stream.indention -= stream.cfg.indentionStep + stream.writeByte('}') +} + +// WriteEmptyObject write {} +func (stream *Stream) WriteEmptyObject() { + stream.writeByte('{') + stream.writeByte('}') +} + +// WriteMore write , with possible indention +func (stream *Stream) WriteMore() { + stream.writeByte(',') + stream.writeIndention(0) + stream.Flush() +} + +// WriteArrayStart write [ with possible indention +func (stream *Stream) WriteArrayStart() { + stream.indention += stream.cfg.indentionStep + stream.writeByte('[') + stream.writeIndention(0) +} + +// WriteEmptyArray write [] +func (stream *Stream) WriteEmptyArray() { + stream.writeTwoBytes('[', ']') +} + +// WriteArrayEnd write ] with possible indention +func (stream *Stream) WriteArrayEnd() { + stream.writeIndention(stream.cfg.indentionStep) + stream.indention -= stream.cfg.indentionStep + stream.writeByte(']') +} + +func (stream *Stream) writeIndention(delta int) { + if stream.indention == 0 { + return + } + stream.writeByte('\n') + toWrite := stream.indention - delta + for i := 0; i < toWrite; i++ { + stream.buf = append(stream.buf, ' ') + } +} diff --git a/vendor/github.com/json-iterator/go/stream_float.go b/vendor/github.com/json-iterator/go/stream_float.go new file mode 100644 index 0000000000..f318d2c59d --- /dev/null +++ b/vendor/github.com/json-iterator/go/stream_float.go @@ -0,0 +1,94 @@ +package jsoniter + +import ( + "math" + "strconv" +) + +var pow10 []uint64 + +func init() { + pow10 = []uint64{1, 10, 100, 1000, 10000, 100000, 1000000} +} + +// WriteFloat32 write float32 to stream +func (stream *Stream) WriteFloat32(val float32) { + abs := math.Abs(float64(val)) + fmt := byte('f') + // Note: Must use float32 comparisons for underlying float32 value to get precise cutoffs right. 
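+	// Magnitudes below 1e-6 or at/above 1e21 switch to exponent ('e') notation;
+	// everything else uses plain 'f' notation with the minimal number of digits
+	// (precision -1), mirroring encoding/json's cutoffs for float32 values.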
+ if abs != 0 { + if float32(abs) < 1e-6 || float32(abs) >= 1e21 { + fmt = 'e' + } + } + stream.buf = strconv.AppendFloat(stream.buf, float64(val), fmt, -1, 32) +} + +// WriteFloat32Lossy write float32 to stream with ONLY 6 digits precision although much much faster +func (stream *Stream) WriteFloat32Lossy(val float32) { + if val < 0 { + stream.writeByte('-') + val = -val + } + if val > 0x4ffffff { + stream.WriteFloat32(val) + return + } + precision := 6 + exp := uint64(1000000) // 6 + lval := uint64(float64(val)*float64(exp) + 0.5) + stream.WriteUint64(lval / exp) + fval := lval % exp + if fval == 0 { + return + } + stream.writeByte('.') + for p := precision - 1; p > 0 && fval < pow10[p]; p-- { + stream.writeByte('0') + } + stream.WriteUint64(fval) + for stream.buf[len(stream.buf)-1] == '0' { + stream.buf = stream.buf[:len(stream.buf)-1] + } +} + +// WriteFloat64 write float64 to stream +func (stream *Stream) WriteFloat64(val float64) { + abs := math.Abs(val) + fmt := byte('f') + // Note: Must use float32 comparisons for underlying float32 value to get precise cutoffs right. + if abs != 0 { + if abs < 1e-6 || abs >= 1e21 { + fmt = 'e' + } + } + stream.buf = strconv.AppendFloat(stream.buf, float64(val), fmt, -1, 64) +} + +// WriteFloat64Lossy write float64 to stream with ONLY 6 digits precision although much much faster +func (stream *Stream) WriteFloat64Lossy(val float64) { + if val < 0 { + stream.writeByte('-') + val = -val + } + if val > 0x4ffffff { + stream.WriteFloat64(val) + return + } + precision := 6 + exp := uint64(1000000) // 6 + lval := uint64(val*float64(exp) + 0.5) + stream.WriteUint64(lval / exp) + fval := lval % exp + if fval == 0 { + return + } + stream.writeByte('.') + for p := precision - 1; p > 0 && fval < pow10[p]; p-- { + stream.writeByte('0') + } + stream.WriteUint64(fval) + for stream.buf[len(stream.buf)-1] == '0' { + stream.buf = stream.buf[:len(stream.buf)-1] + } +} diff --git a/vendor/github.com/json-iterator/go/stream_int.go b/vendor/github.com/json-iterator/go/stream_int.go new file mode 100644 index 0000000000..d1059ee4c2 --- /dev/null +++ b/vendor/github.com/json-iterator/go/stream_int.go @@ -0,0 +1,190 @@ +package jsoniter + +var digits []uint32 + +func init() { + digits = make([]uint32, 1000) + for i := uint32(0); i < 1000; i++ { + digits[i] = (((i / 100) + '0') << 16) + ((((i / 10) % 10) + '0') << 8) + i%10 + '0' + if i < 10 { + digits[i] += 2 << 24 + } else if i < 100 { + digits[i] += 1 << 24 + } + } +} + +func writeFirstBuf(space []byte, v uint32) []byte { + start := v >> 24 + if start == 0 { + space = append(space, byte(v>>16), byte(v>>8)) + } else if start == 1 { + space = append(space, byte(v>>8)) + } + space = append(space, byte(v)) + return space +} + +func writeBuf(buf []byte, v uint32) []byte { + return append(buf, byte(v>>16), byte(v>>8), byte(v)) +} + +// WriteUint8 write uint8 to stream +func (stream *Stream) WriteUint8(val uint8) { + stream.buf = writeFirstBuf(stream.buf, digits[val]) +} + +// WriteInt8 write int8 to stream +func (stream *Stream) WriteInt8(nval int8) { + var val uint8 + if nval < 0 { + val = uint8(-nval) + stream.buf = append(stream.buf, '-') + } else { + val = uint8(nval) + } + stream.buf = writeFirstBuf(stream.buf, digits[val]) +} + +// WriteUint16 write uint16 to stream +func (stream *Stream) WriteUint16(val uint16) { + q1 := val / 1000 + if q1 == 0 { + stream.buf = writeFirstBuf(stream.buf, digits[val]) + return + } + r1 := val - q1*1000 + stream.buf = writeFirstBuf(stream.buf, digits[q1]) + stream.buf = 
writeBuf(stream.buf, digits[r1]) + return +} + +// WriteInt16 write int16 to stream +func (stream *Stream) WriteInt16(nval int16) { + var val uint16 + if nval < 0 { + val = uint16(-nval) + stream.buf = append(stream.buf, '-') + } else { + val = uint16(nval) + } + stream.WriteUint16(val) +} + +// WriteUint32 write uint32 to stream +func (stream *Stream) WriteUint32(val uint32) { + q1 := val / 1000 + if q1 == 0 { + stream.buf = writeFirstBuf(stream.buf, digits[val]) + return + } + r1 := val - q1*1000 + q2 := q1 / 1000 + if q2 == 0 { + stream.buf = writeFirstBuf(stream.buf, digits[q1]) + stream.buf = writeBuf(stream.buf, digits[r1]) + return + } + r2 := q1 - q2*1000 + q3 := q2 / 1000 + if q3 == 0 { + stream.buf = writeFirstBuf(stream.buf, digits[q2]) + } else { + r3 := q2 - q3*1000 + stream.buf = append(stream.buf, byte(q3+'0')) + stream.buf = writeBuf(stream.buf, digits[r3]) + } + stream.buf = writeBuf(stream.buf, digits[r2]) + stream.buf = writeBuf(stream.buf, digits[r1]) +} + +// WriteInt32 write int32 to stream +func (stream *Stream) WriteInt32(nval int32) { + var val uint32 + if nval < 0 { + val = uint32(-nval) + stream.buf = append(stream.buf, '-') + } else { + val = uint32(nval) + } + stream.WriteUint32(val) +} + +// WriteUint64 write uint64 to stream +func (stream *Stream) WriteUint64(val uint64) { + q1 := val / 1000 + if q1 == 0 { + stream.buf = writeFirstBuf(stream.buf, digits[val]) + return + } + r1 := val - q1*1000 + q2 := q1 / 1000 + if q2 == 0 { + stream.buf = writeFirstBuf(stream.buf, digits[q1]) + stream.buf = writeBuf(stream.buf, digits[r1]) + return + } + r2 := q1 - q2*1000 + q3 := q2 / 1000 + if q3 == 0 { + stream.buf = writeFirstBuf(stream.buf, digits[q2]) + stream.buf = writeBuf(stream.buf, digits[r2]) + stream.buf = writeBuf(stream.buf, digits[r1]) + return + } + r3 := q2 - q3*1000 + q4 := q3 / 1000 + if q4 == 0 { + stream.buf = writeFirstBuf(stream.buf, digits[q3]) + stream.buf = writeBuf(stream.buf, digits[r3]) + stream.buf = writeBuf(stream.buf, digits[r2]) + stream.buf = writeBuf(stream.buf, digits[r1]) + return + } + r4 := q3 - q4*1000 + q5 := q4 / 1000 + if q5 == 0 { + stream.buf = writeFirstBuf(stream.buf, digits[q4]) + stream.buf = writeBuf(stream.buf, digits[r4]) + stream.buf = writeBuf(stream.buf, digits[r3]) + stream.buf = writeBuf(stream.buf, digits[r2]) + stream.buf = writeBuf(stream.buf, digits[r1]) + return + } + r5 := q4 - q5*1000 + q6 := q5 / 1000 + if q6 == 0 { + stream.buf = writeFirstBuf(stream.buf, digits[q5]) + } else { + stream.buf = writeFirstBuf(stream.buf, digits[q6]) + r6 := q5 - q6*1000 + stream.buf = writeBuf(stream.buf, digits[r6]) + } + stream.buf = writeBuf(stream.buf, digits[r5]) + stream.buf = writeBuf(stream.buf, digits[r4]) + stream.buf = writeBuf(stream.buf, digits[r3]) + stream.buf = writeBuf(stream.buf, digits[r2]) + stream.buf = writeBuf(stream.buf, digits[r1]) +} + +// WriteInt64 write int64 to stream +func (stream *Stream) WriteInt64(nval int64) { + var val uint64 + if nval < 0 { + val = uint64(-nval) + stream.buf = append(stream.buf, '-') + } else { + val = uint64(nval) + } + stream.WriteUint64(val) +} + +// WriteInt write int to stream +func (stream *Stream) WriteInt(val int) { + stream.WriteInt64(int64(val)) +} + +// WriteUint write uint to stream +func (stream *Stream) WriteUint(val uint) { + stream.WriteUint64(uint64(val)) +} diff --git a/vendor/github.com/json-iterator/go/stream_str.go b/vendor/github.com/json-iterator/go/stream_str.go new file mode 100644 index 0000000000..54c2ba0b3a --- /dev/null +++ 
b/vendor/github.com/json-iterator/go/stream_str.go @@ -0,0 +1,372 @@ +package jsoniter + +import ( + "unicode/utf8" +) + +// htmlSafeSet holds the value true if the ASCII character with the given +// array position can be safely represented inside a JSON string, embedded +// inside of HTML