From 9cdea07fdd8e437a0688616faac3dac2d40da7be Mon Sep 17 00:00:00 2001
From: "copilot-swe-agent[bot]" <198982749+Copilot@users.noreply.github.com>
Date: Fri, 12 Sep 2025 12:29:21 +0000
Subject: [PATCH 1/4] Initial plan

From 899af2cb7e770f4343d18da370e3594fbb30aaa0 Mon Sep 17 00:00:00 2001
From: "copilot-swe-agent[bot]" <198982749+Copilot@users.noreply.github.com>
Date: Fri, 12 Sep 2025 12:33:10 +0000
Subject: [PATCH 2/4] Initial analysis and setup - implement plan to
 distinguish between network and internal errors

Co-authored-by: tiwarishubham635 <59199353+tiwarishubham635@users.noreply.github.com>
---
 [diffstat truncated: this commit checks in the full bundler install under
 vendor/bundle/ruby/3.2.0, including gem sources, compiled .so extensions,
 build logs, and test fixtures for base64, benchmark, diff-lcs, docile,
 equivalent-xml, fakeweb, faraday, faraday-net_http, json, jwt, logger,
 net-http, nokogiri, ostruct, racc, rack, rake, rexml,
 right_http_connection, rspec, rspec-core, rspec-expectations, rspec-mocks,
 rspec-support, simplecov, simplecov-html, simplecov_json_formatter, uri,
 and yard. All paths shown in the original listing are under vendor/bundle;
 the listing breaks off partway through the yard-0.9.37 entries.]
.../yard-0.9.37/lib/yard/registry_store.rb | 342 + .../lib/yard/rubygems/backports.rb | 10 + .../lib/yard/rubygems/backports/LICENSE.txt | 57 + .../lib/yard/rubygems/backports/MIT.txt | 20 + .../lib/yard/rubygems/backports/gem.rb | 10 + .../yard/rubygems/backports/source_index.rb | 365 + .../lib/yard/rubygems/doc_manager.rb | 90 + .../yard-0.9.37/lib/yard/rubygems/hook.rb | 197 + .../lib/yard/rubygems/specification.rb | 50 + .../yard-0.9.37/lib/yard/serializers/base.rb | 83 + .../serializers/file_system_serializer.rb | 123 + .../yard/serializers/process_serializer.rb | 24 + .../lib/yard/serializers/stdout_serializer.rb | 34 + .../lib/yard/serializers/yardoc_serializer.rb | 152 + .../3.2.0/gems/yard-0.9.37/lib/yard/server.rb | 13 + .../yard-0.9.37/lib/yard/server/adapter.rb | 100 + .../lib/yard/server/commands/base.rb | 209 + .../server/commands/display_file_command.rb | 29 + .../server/commands/display_object_command.rb | 65 + .../yard/server/commands/frames_command.rb | 16 + .../yard/server/commands/library_command.rb | 187 + .../server/commands/library_index_command.rb | 28 + .../lib/yard/server/commands/list_command.rb | 25 + .../server/commands/root_request_command.rb | 15 + .../yard/server/commands/search_command.rb | 79 + .../server/commands/static_file_command.rb | 23 + .../server/commands/static_file_helpers.rb | 60 + .../lib/yard/server/doc_server_helper.rb | 91 + .../lib/yard/server/doc_server_serializer.rb | 39 + .../yard-0.9.37/lib/yard/server/http_utils.rb | 512 + .../lib/yard/server/library_version.rb | 277 + .../lib/yard/server/rack_adapter.rb | 97 + .../yard-0.9.37/lib/yard/server/router.rb | 187 + .../lib/yard/server/static_caching.rb | 46 + .../default/fulldoc/html/css/custom.css | 127 + .../fulldoc/html/images/processing.gif | Bin 0 -> 1251 bytes .../default/fulldoc/html/js/autocomplete.js | 12 + .../default/layout/html/breadcrumb.erb | 37 + .../default/layout/html/script_setup.erb | 7 + .../templates/default/layout/html/setup.rb | 8 + .../default/method_details/html/permalink.erb | 4 + .../default/method_details/html/setup.rb | 5 + .../doc_server/library_list/html/headers.erb | 8 + .../library_list/html/library_list.erb | 14 + .../doc_server/library_list/html/listing.erb | 13 + .../doc_server/library_list/html/setup.rb | 6 + .../doc_server/library_list/html/title.erb | 2 + .../doc_server/processing/html/processing.erb | 52 + .../doc_server/processing/html/setup.rb | 4 + .../doc_server/search/html/search.erb | 18 + .../templates/doc_server/search/html/setup.rb | 9 + .../lib/yard/server/webrick_adapter.rb | 45 + .../lib/yard/tags/default_factory.rb | 192 + .../yard-0.9.37/lib/yard/tags/default_tag.rb | 13 + .../yard-0.9.37/lib/yard/tags/directives.rb | 624 + .../gems/yard-0.9.37/lib/yard/tags/library.rb | 633 + .../yard-0.9.37/lib/yard/tags/option_tag.rb | 13 + .../yard-0.9.37/lib/yard/tags/overload_tag.rb | 71 + .../gems/yard-0.9.37/lib/yard/tags/ref_tag.rb | 8 + .../yard-0.9.37/lib/yard/tags/ref_tag_list.rb | 28 + .../gems/yard-0.9.37/lib/yard/tags/tag.rb | 72 + .../lib/yard/tags/tag_format_error.rb | 7 + .../lib/yard/tags/types_explainer.rb | 162 + .../yard-0.9.37/lib/yard/templates/engine.rb | 185 + .../lib/yard/templates/erb_cache.rb | 23 + .../lib/yard/templates/helpers/base_helper.rb | 215 + .../yard/templates/helpers/filter_helper.rb | 27 + .../lib/yard/templates/helpers/html_helper.rb | 673 + .../helpers/html_syntax_highlight_helper.rb | 78 + .../templates/helpers/markup/rdoc_markdown.rb | 23 + .../templates/helpers/markup/rdoc_markup.rb | 110 + 
.../yard/templates/helpers/markup_helper.rb | 173 + .../yard/templates/helpers/method_helper.rb | 77 + .../yard/templates/helpers/module_helper.rb | 21 + .../lib/yard/templates/helpers/text_helper.rb | 112 + .../lib/yard/templates/helpers/uml_helper.rb | 47 + .../yard-0.9.37/lib/yard/templates/section.rb | 103 + .../lib/yard/templates/template.rb | 420 + .../lib/yard/templates/template_options.rb | 91 + .../gems/yard-0.9.37/lib/yard/verifier.rb | 151 + .../gems/yard-0.9.37/lib/yard/version.rb | 6 + .../ruby/3.2.0/gems/yard-0.9.37/po/ja.po | 31108 ++++++++++++++++ .../templates/default/class/dot/setup.rb | 7 + .../default/class/dot/superklass.erb | 3 + .../class/html/constructor_details.erb | 8 + .../templates/default/class/html/setup.rb | 2 + .../default/class/html/subclasses.erb | 4 + .../templates/default/class/setup.rb | 36 + .../templates/default/class/text/setup.rb | 12 + .../default/class/text/subclasses.erb | 5 + .../default/constant/text/header.erb | 11 + .../templates/default/constant/text/setup.rb | 4 + .../default/docstring/html/abstract.erb | 4 + .../default/docstring/html/deprecated.erb | 1 + .../default/docstring/html/index.erb | 5 + .../templates/default/docstring/html/note.erb | 6 + .../default/docstring/html/private.erb | 4 + .../default/docstring/html/returns_void.erb | 1 + .../templates/default/docstring/html/text.erb | 1 + .../templates/default/docstring/html/todo.erb | 6 + .../templates/default/docstring/setup.rb | 52 + .../default/docstring/text/abstract.erb | 2 + .../default/docstring/text/deprecated.erb | 2 + .../default/docstring/text/index.erb | 2 + .../templates/default/docstring/text/note.erb | 4 + .../default/docstring/text/private.erb | 2 + .../default/docstring/text/returns_void.erb | 1 + .../templates/default/docstring/text/text.erb | 1 + .../templates/default/docstring/text/todo.erb | 4 + .../default/fulldoc/html/css/common.css | 1 + .../default/fulldoc/html/css/full_list.css | 58 + .../default/fulldoc/html/css/style.css | 503 + .../templates/default/fulldoc/html/frames.erb | 22 + .../default/fulldoc/html/full_list.erb | 40 + .../default/fulldoc/html/full_list_class.erb | 2 + .../default/fulldoc/html/full_list_file.erb | 7 + .../default/fulldoc/html/full_list_method.erb | 10 + .../templates/default/fulldoc/html/js/app.js | 344 + .../default/fulldoc/html/js/full_list.js | 242 + .../default/fulldoc/html/js/jquery.js | 4 + .../templates/default/fulldoc/html/setup.rb | 249 + .../templates/default/layout/dot/header.erb | 6 + .../templates/default/layout/dot/setup.rb | 15 + .../default/layout/html/breadcrumb.erb | 11 + .../templates/default/layout/html/files.erb | 11 + .../templates/default/layout/html/footer.erb | 5 + .../templates/default/layout/html/headers.erb | 15 + .../templates/default/layout/html/index.erb | 2 + .../templates/default/layout/html/layout.erb | 24 + .../templates/default/layout/html/listing.erb | 4 + .../templates/default/layout/html/objects.erb | 32 + .../default/layout/html/script_setup.erb | 4 + .../templates/default/layout/html/search.erb | 13 + .../templates/default/layout/html/setup.rb | 89 + .../templates/default/method/html/header.erb | 17 + .../templates/default/method/setup.rb | 4 + .../templates/default/method/text/header.erb | 1 + .../default/method_details/html/header.erb | 3 + .../method_details/html/method_signature.erb | 25 + .../default/method_details/html/source.erb | 10 + .../templates/default/method_details/setup.rb | 11 + .../default/method_details/text/header.erb | 10 + .../method_details/text/method_signature.erb | 12 
+ .../default/method_details/text/setup.rb | 11 + .../templates/default/module/dot/child.erb | 1 + .../default/module/dot/dependencies.erb | 3 + .../templates/default/module/dot/header.erb | 6 + .../templates/default/module/dot/info.erb | 14 + .../templates/default/module/dot/setup.rb | 15 + .../default/module/html/attribute_details.erb | 10 + .../default/module/html/attribute_summary.erb | 8 + .../default/module/html/box_info.erb | 43 + .../default/module/html/children.erb | 8 + .../default/module/html/constant_summary.erb | 17 + .../templates/default/module/html/defines.erb | 3 + .../templates/default/module/html/header.erb | 5 + .../module/html/inherited_attributes.erb | 14 + .../module/html/inherited_constants.erb | 8 + .../default/module/html/inherited_methods.erb | 19 + .../default/module/html/item_summary.erb | 40 + .../module/html/method_details_list.erb | 9 + .../default/module/html/method_summary.erb | 14 + .../default/module/html/methodmissing.erb | 12 + .../default/module/html/pre_docstring.erb | 1 + .../templates/default/module/setup.rb | 167 + .../default/module/text/children.erb | 10 + .../default/module/text/class_meths_list.erb | 8 + .../templates/default/module/text/extends.erb | 8 + .../templates/default/module/text/header.erb | 7 + .../default/module/text/includes.erb | 8 + .../module/text/instance_meths_list.erb | 8 + .../templates/default/module/text/setup.rb | 13 + .../templates/default/onefile/html/files.erb | 5 + .../default/onefile/html/headers.erb | 8 + .../templates/default/onefile/html/layout.erb | 17 + .../templates/default/onefile/html/readme.erb | 3 + .../templates/default/onefile/html/setup.rb | 62 + .../templates/default/root/dot/child.erb | 3 + .../templates/default/root/dot/setup.rb | 6 + .../templates/default/root/html/setup.rb | 2 + .../templates/default/tags/html/example.erb | 11 + .../templates/default/tags/html/index.erb | 3 + .../templates/default/tags/html/option.erb | 24 + .../templates/default/tags/html/overload.erb | 14 + .../templates/default/tags/html/see.erb | 8 + .../templates/default/tags/html/tag.erb | 20 + .../templates/default/tags/setup.rb | 57 + .../templates/default/tags/text/example.erb | 12 + .../templates/default/tags/text/index.erb | 1 + .../templates/default/tags/text/option.erb | 20 + .../templates/default/tags/text/overload.erb | 19 + .../templates/default/tags/text/see.erb | 11 + .../templates/default/tags/text/tag.erb | 13 + .../templates/guide/class/html/setup.rb | 2 + .../templates/guide/docstring/html/setup.rb | 2 + .../guide/fulldoc/html/css/style.css | 108 + .../templates/guide/fulldoc/html/js/app.js | 33 + .../templates/guide/fulldoc/html/setup.rb | 74 + .../templates/guide/layout/html/layout.erb | 81 + .../templates/guide/layout/html/setup.rb | 25 + .../templates/guide/method/html/header.erb | 18 + .../templates/guide/method/html/setup.rb | 22 + .../templates/guide/module/html/header.erb | 7 + .../guide/module/html/method_list.erb | 5 + .../templates/guide/module/html/setup.rb | 27 + .../templates/guide/onefile/html/files.erb | 4 + .../templates/guide/onefile/html/setup.rb | 6 + .../templates/guide/onefile/html/toc.erb | 3 + .../templates/guide/tags/html/setup.rb | 9 + .../bundle/ruby/3.2.0/plugins/yard_plugin.rb | 1 + .../3.2.0/specifications/base64-0.3.0.gemspec | 23 + .../specifications/benchmark-0.4.1.gemspec | 23 + .../specifications/diff-lcs-1.6.2.gemspec | 35 + .../3.2.0/specifications/docile-1.4.1.gemspec | 22 + .../equivalent-xml-0.6.0.gemspec | 31 + .../specifications/fakeweb-1.3.0.gemspec | 26 + 
.../specifications/faraday-2.13.4.gemspec | Bin 0 -> 1394 bytes .../faraday-net_http-3.4.1.gemspec | 26 + .../3.2.0/specifications/json-2.13.2.gemspec | Bin 0 -> 1593 bytes .../3.2.0/specifications/jwt-3.1.2.gemspec | 34 + .../3.2.0/specifications/logger-1.4.4.gemspec | 28 + .../specifications/net-http-0.6.0.gemspec | 27 + .../nokogiri-1.18.9-x86_64-linux-gnu.gemspec | 31 + .../specifications/ostruct-0.6.3.gemspec | 22 + .../3.2.0/specifications/racc-1.8.1.gemspec | 28 + .../3.2.0/specifications/rack-2.2.17.gemspec | 32 + .../3.2.0/specifications/rake-13.3.0.gemspec | 26 + .../3.2.0/specifications/rexml-3.4.4.gemspec | 25 + .../3.2.0/specifications/rspec-3.13.1.gemspec | 31 + .../specifications/rspec-core-3.13.5.gemspec | 31 + .../rspec-expectations-3.13.5.gemspec | 29 + .../specifications/rspec-mocks-3.13.5.gemspec | 29 + .../rspec-support-3.13.5.gemspec | 29 + .../specifications/simplecov-0.22.0.gemspec | 28 + .../simplecov-html-0.13.2.gemspec | 22 + .../simplecov_json_formatter-0.1.4.gemspec | 21 + .../3.2.0/specifications/uri-1.0.3.gemspec | 23 + .../3.2.0/specifications/yard-0.9.37.gemspec | 23 + 1600 files changed, 255732 insertions(+) create mode 100644 vendor/bundle/ruby/3.2.0/extensions/x86_64-linux-gnu/3.2.0/json-2.13.2/gem.build_complete create mode 100644 vendor/bundle/ruby/3.2.0/extensions/x86_64-linux-gnu/3.2.0/json-2.13.2/gem_make.out create mode 100755 vendor/bundle/ruby/3.2.0/extensions/x86_64-linux-gnu/3.2.0/json-2.13.2/json/ext/generator.so create mode 100755 vendor/bundle/ruby/3.2.0/extensions/x86_64-linux-gnu/3.2.0/json-2.13.2/json/ext/parser.so create mode 100644 vendor/bundle/ruby/3.2.0/extensions/x86_64-linux-gnu/3.2.0/json-2.13.2/mkmf.log create mode 100644 vendor/bundle/ruby/3.2.0/extensions/x86_64-linux-gnu/3.2.0/racc-1.8.1/gem.build_complete create mode 100644 vendor/bundle/ruby/3.2.0/extensions/x86_64-linux-gnu/3.2.0/racc-1.8.1/gem_make.out create mode 100755 vendor/bundle/ruby/3.2.0/extensions/x86_64-linux-gnu/3.2.0/racc-1.8.1/racc/cparse.so create mode 100644 vendor/bundle/ruby/3.2.0/gems/base64-0.3.0/BSDL create mode 100644 vendor/bundle/ruby/3.2.0/gems/base64-0.3.0/COPYING create mode 100644 vendor/bundle/ruby/3.2.0/gems/base64-0.3.0/LEGAL create mode 100644 vendor/bundle/ruby/3.2.0/gems/base64-0.3.0/README.md create mode 100644 vendor/bundle/ruby/3.2.0/gems/base64-0.3.0/lib/base64.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/base64-0.3.0/sig/base64.rbs create mode 100644 vendor/bundle/ruby/3.2.0/gems/benchmark-0.4.1/.github/dependabot.yml create mode 100644 vendor/bundle/ruby/3.2.0/gems/benchmark-0.4.1/.github/workflows/push_gem.yml create mode 100644 vendor/bundle/ruby/3.2.0/gems/benchmark-0.4.1/.github/workflows/test.yml create mode 100644 vendor/bundle/ruby/3.2.0/gems/benchmark-0.4.1/.gitignore create mode 100644 vendor/bundle/ruby/3.2.0/gems/benchmark-0.4.1/BSDL create mode 100644 vendor/bundle/ruby/3.2.0/gems/benchmark-0.4.1/COPYING create mode 100644 vendor/bundle/ruby/3.2.0/gems/benchmark-0.4.1/Gemfile create mode 100644 vendor/bundle/ruby/3.2.0/gems/benchmark-0.4.1/README.md create mode 100644 vendor/bundle/ruby/3.2.0/gems/benchmark-0.4.1/Rakefile create mode 100644 vendor/bundle/ruby/3.2.0/gems/benchmark-0.4.1/benchmark.gemspec create mode 100644 vendor/bundle/ruby/3.2.0/gems/benchmark-0.4.1/lib/benchmark.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/.rspec create mode 100644 vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/CHANGELOG.md create mode 100644 vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/CODE_OF_CONDUCT.md 
create mode 100644 vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/CONTRIBUTING.md create mode 100644 vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/CONTRIBUTORS.md create mode 100644 vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/LICENCE.md create mode 100644 vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/Manifest.txt create mode 100644 vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/README.md create mode 100644 vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/Rakefile create mode 100644 vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/SECURITY.md create mode 100644 vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/docs/COPYING.txt create mode 100644 vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/docs/artistic.txt create mode 100644 vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/lib/diff-lcs.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/lib/diff/lcs.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/lib/diff/lcs/array.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/lib/diff/lcs/backports.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/lib/diff/lcs/block.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/lib/diff/lcs/callbacks.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/lib/diff/lcs/change.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/lib/diff/lcs/htmldiff.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/lib/diff/lcs/hunk.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/lib/diff/lcs/internals.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/lib/diff/lcs/ldiff.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/lib/diff/lcs/string.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/lib/diff/lcs/version.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/mise.toml create mode 100644 vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/change_spec.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/diff_spec.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/123_x create mode 100644 vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/456_x create mode 100644 vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/aX create mode 100644 vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/bXaX create mode 100644 vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ds1.csv create mode 100644 vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ds2.csv create mode 100644 vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/empty create mode 100644 vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/file1.bin create mode 100644 vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/file2.bin create mode 100644 vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/four_lines create mode 100644 vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/four_lines_with_missing_new_line create mode 100644 vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ldiff/diff.missing_new_line1-e create mode 100644 vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ldiff/diff.missing_new_line1-f create mode 100644 vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ldiff/diff.missing_new_line2-e create mode 100644 vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ldiff/diff.missing_new_line2-f create mode 100644 
vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ldiff/error.diff.chef-e create mode 100644 vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ldiff/error.diff.chef-f create mode 100644 vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ldiff/error.diff.missing_new_line1-e create mode 100644 vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ldiff/error.diff.missing_new_line1-f create mode 100644 vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ldiff/error.diff.missing_new_line2-e create mode 100644 vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ldiff/error.diff.missing_new_line2-f create mode 100644 vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ldiff/output.diff create mode 100644 vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ldiff/output.diff-c create mode 100644 vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ldiff/output.diff-e create mode 100644 vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ldiff/output.diff-f create mode 100644 vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ldiff/output.diff-u create mode 100644 vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ldiff/output.diff.bin1 create mode 100644 vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ldiff/output.diff.bin1-c create mode 100644 vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ldiff/output.diff.bin1-e create mode 100644 vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ldiff/output.diff.bin1-f create mode 100644 vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ldiff/output.diff.bin1-u create mode 100644 vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ldiff/output.diff.bin2 create mode 100644 vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ldiff/output.diff.bin2-c create mode 100644 vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ldiff/output.diff.bin2-e create mode 100644 vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ldiff/output.diff.bin2-f create mode 100644 vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ldiff/output.diff.bin2-u create mode 100644 vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ldiff/output.diff.chef create mode 100644 vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ldiff/output.diff.chef-c create mode 100644 vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ldiff/output.diff.chef-e create mode 100644 vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ldiff/output.diff.chef-f create mode 100644 vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ldiff/output.diff.chef-u create mode 100644 vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ldiff/output.diff.chef2 create mode 100644 vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ldiff/output.diff.chef2-c create mode 100644 vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ldiff/output.diff.chef2-d create mode 100644 vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ldiff/output.diff.chef2-e create mode 100644 vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ldiff/output.diff.chef2-f create mode 100644 vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ldiff/output.diff.chef2-u create mode 100644 vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ldiff/output.diff.empty.vs.four_lines create mode 100644 vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ldiff/output.diff.empty.vs.four_lines-c create mode 100644 
vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ldiff/output.diff.empty.vs.four_lines-e create mode 100644 vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ldiff/output.diff.empty.vs.four_lines-f create mode 100644 vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ldiff/output.diff.empty.vs.four_lines-u create mode 100644 vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ldiff/output.diff.four_lines.vs.empty create mode 100644 vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ldiff/output.diff.four_lines.vs.empty-c create mode 100644 vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ldiff/output.diff.four_lines.vs.empty-e create mode 100644 vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ldiff/output.diff.four_lines.vs.empty-f create mode 100644 vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ldiff/output.diff.four_lines.vs.empty-u create mode 100644 vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ldiff/output.diff.issue95_trailing_context create mode 100644 vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ldiff/output.diff.issue95_trailing_context-c create mode 100644 vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ldiff/output.diff.issue95_trailing_context-e create mode 100644 vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ldiff/output.diff.issue95_trailing_context-f create mode 100644 vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ldiff/output.diff.issue95_trailing_context-u create mode 100644 vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ldiff/output.diff.missing_new_line1 create mode 100644 vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ldiff/output.diff.missing_new_line1-c create mode 100644 vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ldiff/output.diff.missing_new_line1-e create mode 100644 vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ldiff/output.diff.missing_new_line1-f create mode 100644 vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ldiff/output.diff.missing_new_line1-u create mode 100644 vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ldiff/output.diff.missing_new_line2 create mode 100644 vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ldiff/output.diff.missing_new_line2-c create mode 100644 vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ldiff/output.diff.missing_new_line2-e create mode 100644 vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ldiff/output.diff.missing_new_line2-f create mode 100644 vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ldiff/output.diff.missing_new_line2-u create mode 100644 vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/new-chef create mode 100644 vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/new-chef2 create mode 100644 vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/old-chef create mode 100644 vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/old-chef2 create mode 100644 vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/hunk_spec.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/issues_spec.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/lcs_spec.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/ldiff_spec.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/patch_spec.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/sdiff_spec.rb create mode 
100644 vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/spec_helper.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/traverse_balanced_spec.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/traverse_sequences_spec.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/docile-1.4.1/.github/dependabot.yml create mode 100644 vendor/bundle/ruby/3.2.0/gems/docile-1.4.1/.github/workflows/main.yml create mode 100644 vendor/bundle/ruby/3.2.0/gems/docile-1.4.1/.gitignore create mode 100644 vendor/bundle/ruby/3.2.0/gems/docile-1.4.1/.rspec create mode 100644 vendor/bundle/ruby/3.2.0/gems/docile-1.4.1/.rubocop.yml create mode 100644 vendor/bundle/ruby/3.2.0/gems/docile-1.4.1/.yardopts create mode 100644 vendor/bundle/ruby/3.2.0/gems/docile-1.4.1/Gemfile create mode 100644 vendor/bundle/ruby/3.2.0/gems/docile-1.4.1/HISTORY.md create mode 100644 vendor/bundle/ruby/3.2.0/gems/docile-1.4.1/LICENSE create mode 100644 vendor/bundle/ruby/3.2.0/gems/docile-1.4.1/README.md create mode 100644 vendor/bundle/ruby/3.2.0/gems/docile-1.4.1/Rakefile create mode 100644 vendor/bundle/ruby/3.2.0/gems/docile-1.4.1/SECURITY.md create mode 100644 vendor/bundle/ruby/3.2.0/gems/docile-1.4.1/docile.gemspec create mode 100644 vendor/bundle/ruby/3.2.0/gems/docile-1.4.1/lib/docile.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/docile-1.4.1/lib/docile/backtrace_filter.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/docile-1.4.1/lib/docile/chaining_fallback_context_proxy.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/docile-1.4.1/lib/docile/execution.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/docile-1.4.1/lib/docile/fallback_context_proxy.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/docile-1.4.1/lib/docile/version.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/equivalent-xml-0.6.0/Gemfile create mode 100644 vendor/bundle/ruby/3.2.0/gems/equivalent-xml-0.6.0/LICENSE.txt create mode 100644 vendor/bundle/ruby/3.2.0/gems/equivalent-xml-0.6.0/README.md create mode 100644 vendor/bundle/ruby/3.2.0/gems/equivalent-xml-0.6.0/Rakefile create mode 100644 vendor/bundle/ruby/3.2.0/gems/equivalent-xml-0.6.0/lib/equivalent-xml.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/equivalent-xml-0.6.0/lib/equivalent-xml/rspec_matchers.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/equivalent-xml-0.6.0/spec/equivalent-xml_spec.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/fakeweb-1.3.0/.autotest create mode 100644 vendor/bundle/ruby/3.2.0/gems/fakeweb-1.3.0/.gitignore create mode 100644 vendor/bundle/ruby/3.2.0/gems/fakeweb-1.3.0/CHANGELOG create mode 100644 vendor/bundle/ruby/3.2.0/gems/fakeweb-1.3.0/LICENSE.txt create mode 100644 vendor/bundle/ruby/3.2.0/gems/fakeweb-1.3.0/README.rdoc create mode 100644 vendor/bundle/ruby/3.2.0/gems/fakeweb-1.3.0/Rakefile create mode 100644 vendor/bundle/ruby/3.2.0/gems/fakeweb-1.3.0/fakeweb.gemspec create mode 100644 vendor/bundle/ruby/3.2.0/gems/fakeweb-1.3.0/lib/fake_web.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/fakeweb-1.3.0/lib/fake_web/ext/net_http.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/fakeweb-1.3.0/lib/fake_web/registry.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/fakeweb-1.3.0/lib/fake_web/responder.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/fakeweb-1.3.0/lib/fake_web/response.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/fakeweb-1.3.0/lib/fake_web/stub_socket.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/fakeweb-1.3.0/lib/fake_web/utility.rb create mode 100644 
vendor/bundle/ruby/3.2.0/gems/fakeweb-1.3.0/lib/fakeweb.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/fakeweb-1.3.0/test/fixtures/google_response_from_curl create mode 100644 vendor/bundle/ruby/3.2.0/gems/fakeweb-1.3.0/test/fixtures/google_response_with_transfer_encoding create mode 100644 vendor/bundle/ruby/3.2.0/gems/fakeweb-1.3.0/test/fixtures/google_response_without_transfer_encoding create mode 100644 vendor/bundle/ruby/3.2.0/gems/fakeweb-1.3.0/test/fixtures/test_example.txt create mode 100644 vendor/bundle/ruby/3.2.0/gems/fakeweb-1.3.0/test/fixtures/test_txt_file create mode 100644 vendor/bundle/ruby/3.2.0/gems/fakeweb-1.3.0/test/test_allow_net_connect.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/fakeweb-1.3.0/test/test_deprecations.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/fakeweb-1.3.0/test/test_fake_authentication.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/fakeweb-1.3.0/test/test_fake_web.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/fakeweb-1.3.0/test/test_fake_web_open_uri.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/fakeweb-1.3.0/test/test_helper.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/fakeweb-1.3.0/test/test_last_request.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/fakeweb-1.3.0/test/test_missing_open_uri.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/fakeweb-1.3.0/test/test_missing_pathname.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/fakeweb-1.3.0/test/test_other_net_http_libraries.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/fakeweb-1.3.0/test/test_precedence.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/fakeweb-1.3.0/test/test_query_string.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/fakeweb-1.3.0/test/test_regexes.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/fakeweb-1.3.0/test/test_response_headers.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/fakeweb-1.3.0/test/test_trailing_slashes.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/fakeweb-1.3.0/test/test_utility.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/fakeweb-1.3.0/test/vendor/right_http_connection-1.2.4/History.txt create mode 100644 vendor/bundle/ruby/3.2.0/gems/fakeweb-1.3.0/test/vendor/right_http_connection-1.2.4/Manifest.txt create mode 100644 vendor/bundle/ruby/3.2.0/gems/fakeweb-1.3.0/test/vendor/right_http_connection-1.2.4/README.txt create mode 100644 vendor/bundle/ruby/3.2.0/gems/fakeweb-1.3.0/test/vendor/right_http_connection-1.2.4/Rakefile create mode 100644 vendor/bundle/ruby/3.2.0/gems/fakeweb-1.3.0/test/vendor/right_http_connection-1.2.4/lib/net_fix.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/fakeweb-1.3.0/test/vendor/right_http_connection-1.2.4/lib/right_http_connection.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/fakeweb-1.3.0/test/vendor/right_http_connection-1.2.4/setup.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/fakeweb-1.3.0/test/vendor/samuel-0.2.1/.document create mode 100644 vendor/bundle/ruby/3.2.0/gems/fakeweb-1.3.0/test/vendor/samuel-0.2.1/.gitignore create mode 100644 vendor/bundle/ruby/3.2.0/gems/fakeweb-1.3.0/test/vendor/samuel-0.2.1/LICENSE create mode 100644 vendor/bundle/ruby/3.2.0/gems/fakeweb-1.3.0/test/vendor/samuel-0.2.1/README.rdoc create mode 100644 vendor/bundle/ruby/3.2.0/gems/fakeweb-1.3.0/test/vendor/samuel-0.2.1/Rakefile create mode 100644 vendor/bundle/ruby/3.2.0/gems/fakeweb-1.3.0/test/vendor/samuel-0.2.1/VERSION create mode 100644 vendor/bundle/ruby/3.2.0/gems/fakeweb-1.3.0/test/vendor/samuel-0.2.1/lib/samuel.rb create mode 100644 
vendor/bundle/ruby/3.2.0/gems/fakeweb-1.3.0/test/vendor/samuel-0.2.1/lib/samuel/net_http.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/fakeweb-1.3.0/test/vendor/samuel-0.2.1/lib/samuel/request.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/fakeweb-1.3.0/test/vendor/samuel-0.2.1/samuel.gemspec create mode 100644 vendor/bundle/ruby/3.2.0/gems/fakeweb-1.3.0/test/vendor/samuel-0.2.1/test/request_test.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/fakeweb-1.3.0/test/vendor/samuel-0.2.1/test/samuel_test.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/fakeweb-1.3.0/test/vendor/samuel-0.2.1/test/test_helper.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/fakeweb-1.3.0/test/vendor/samuel-0.2.1/test/thread_test.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/CHANGELOG.md create mode 100644 vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/LICENSE.md create mode 100644 vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/README.md create mode 100644 vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/Rakefile create mode 100644 vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/examples/client_spec.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/examples/client_test.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/lib/faraday.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/lib/faraday/adapter.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/lib/faraday/adapter/test.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/lib/faraday/adapter_registry.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/lib/faraday/connection.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/lib/faraday/encoders/flat_params_encoder.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/lib/faraday/encoders/nested_params_encoder.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/lib/faraday/error.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/lib/faraday/logging/formatter.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/lib/faraday/methods.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/lib/faraday/middleware.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/lib/faraday/middleware_registry.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/lib/faraday/options.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/lib/faraday/options/connection_options.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/lib/faraday/options/env.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/lib/faraday/options/proxy_options.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/lib/faraday/options/request_options.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/lib/faraday/options/ssl_options.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/lib/faraday/parameters.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/lib/faraday/rack_builder.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/lib/faraday/request.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/lib/faraday/request/authorization.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/lib/faraday/request/instrumentation.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/lib/faraday/request/json.rb create mode 100644 
vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/lib/faraday/request/url_encoded.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/lib/faraday/response.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/lib/faraday/response/json.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/lib/faraday/response/logger.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/lib/faraday/response/raise_error.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/lib/faraday/utils.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/lib/faraday/utils/headers.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/lib/faraday/utils/params_hash.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/lib/faraday/version.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/spec/external_adapters/faraday_specs_setup.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/spec/faraday/adapter/test_spec.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/spec/faraday/adapter_registry_spec.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/spec/faraday/adapter_spec.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/spec/faraday/connection_spec.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/spec/faraday/error_spec.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/spec/faraday/middleware_registry_spec.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/spec/faraday/middleware_spec.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/spec/faraday/options/env_spec.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/spec/faraday/options/options_spec.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/spec/faraday/options/proxy_options_spec.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/spec/faraday/options/request_options_spec.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/spec/faraday/params_encoders/flat_spec.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/spec/faraday/params_encoders/nested_spec.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/spec/faraday/rack_builder_spec.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/spec/faraday/request/authorization_spec.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/spec/faraday/request/instrumentation_spec.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/spec/faraday/request/json_spec.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/spec/faraday/request/url_encoded_spec.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/spec/faraday/request_spec.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/spec/faraday/response/json_spec.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/spec/faraday/response/logger_spec.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/spec/faraday/response/raise_error_spec.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/spec/faraday/response_spec.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/spec/faraday/utils/headers_spec.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/spec/faraday/utils_spec.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/spec/faraday_spec.rb create mode 100644 
vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/spec/spec_helper.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/spec/support/disabling_stub.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/spec/support/fake_safe_buffer.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/spec/support/faraday_middleware_subclasses.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/spec/support/helper_methods.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/spec/support/shared_examples/adapter.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/spec/support/shared_examples/params_encoder.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/spec/support/shared_examples/request_method.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/spec/support/streaming_response_checker.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/faraday-net_http-3.4.1/LICENSE.md create mode 100644 vendor/bundle/ruby/3.2.0/gems/faraday-net_http-3.4.1/README.md create mode 100644 vendor/bundle/ruby/3.2.0/gems/faraday-net_http-3.4.1/lib/faraday/adapter/net_http.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/faraday-net_http-3.4.1/lib/faraday/net_http.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/faraday-net_http-3.4.1/lib/faraday/net_http/version.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/json-2.13.2/BSDL create mode 100644 vendor/bundle/ruby/3.2.0/gems/json-2.13.2/CHANGES.md create mode 100644 vendor/bundle/ruby/3.2.0/gems/json-2.13.2/COPYING create mode 100644 vendor/bundle/ruby/3.2.0/gems/json-2.13.2/LEGAL create mode 100644 vendor/bundle/ruby/3.2.0/gems/json-2.13.2/README.md create mode 100644 vendor/bundle/ruby/3.2.0/gems/json-2.13.2/ext/json/ext/fbuffer/fbuffer.h create mode 100644 vendor/bundle/ruby/3.2.0/gems/json-2.13.2/ext/json/ext/generator/Makefile create mode 100644 vendor/bundle/ruby/3.2.0/gems/json-2.13.2/ext/json/ext/generator/extconf.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/json-2.13.2/ext/json/ext/generator/generator.c create mode 100644 vendor/bundle/ruby/3.2.0/gems/json-2.13.2/ext/json/ext/parser/Makefile create mode 100644 vendor/bundle/ruby/3.2.0/gems/json-2.13.2/ext/json/ext/parser/extconf.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/json-2.13.2/ext/json/ext/parser/parser.c create mode 100644 vendor/bundle/ruby/3.2.0/gems/json-2.13.2/ext/json/ext/simd/conf.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/json-2.13.2/ext/json/ext/simd/simd.h create mode 100644 vendor/bundle/ruby/3.2.0/gems/json-2.13.2/ext/json/ext/vendor/fpconv.c create mode 100644 vendor/bundle/ruby/3.2.0/gems/json-2.13.2/ext/json/ext/vendor/jeaiii-ltoa.h create mode 100644 vendor/bundle/ruby/3.2.0/gems/json-2.13.2/json.gemspec create mode 100644 vendor/bundle/ruby/3.2.0/gems/json-2.13.2/lib/json.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/json-2.13.2/lib/json/add/bigdecimal.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/json-2.13.2/lib/json/add/complex.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/json-2.13.2/lib/json/add/core.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/json-2.13.2/lib/json/add/date.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/json-2.13.2/lib/json/add/date_time.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/json-2.13.2/lib/json/add/exception.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/json-2.13.2/lib/json/add/ostruct.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/json-2.13.2/lib/json/add/range.rb create mode 
100644 vendor/bundle/ruby/3.2.0/gems/json-2.13.2/lib/json/add/rational.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/json-2.13.2/lib/json/add/regexp.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/json-2.13.2/lib/json/add/set.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/json-2.13.2/lib/json/add/struct.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/json-2.13.2/lib/json/add/symbol.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/json-2.13.2/lib/json/add/time.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/json-2.13.2/lib/json/common.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/json-2.13.2/lib/json/ext.rb create mode 100755 vendor/bundle/ruby/3.2.0/gems/json-2.13.2/lib/json/ext/generator.so create mode 100644 vendor/bundle/ruby/3.2.0/gems/json-2.13.2/lib/json/ext/generator/state.rb create mode 100755 vendor/bundle/ruby/3.2.0/gems/json-2.13.2/lib/json/ext/parser.so create mode 100644 vendor/bundle/ruby/3.2.0/gems/json-2.13.2/lib/json/generic_object.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/json-2.13.2/lib/json/truffle_ruby/generator.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/json-2.13.2/lib/json/version.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/jwt-3.1.2/AUTHORS create mode 100644 vendor/bundle/ruby/3.2.0/gems/jwt-3.1.2/CHANGELOG.md create mode 100644 vendor/bundle/ruby/3.2.0/gems/jwt-3.1.2/CODE_OF_CONDUCT.md create mode 100644 vendor/bundle/ruby/3.2.0/gems/jwt-3.1.2/CONTRIBUTING.md create mode 100644 vendor/bundle/ruby/3.2.0/gems/jwt-3.1.2/LICENSE create mode 100644 vendor/bundle/ruby/3.2.0/gems/jwt-3.1.2/README.md create mode 100644 vendor/bundle/ruby/3.2.0/gems/jwt-3.1.2/UPGRADING.md create mode 100644 vendor/bundle/ruby/3.2.0/gems/jwt-3.1.2/lib/jwt.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/jwt-3.1.2/lib/jwt/base64.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/jwt-3.1.2/lib/jwt/claims.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/jwt-3.1.2/lib/jwt/claims/audience.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/jwt-3.1.2/lib/jwt/claims/crit.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/jwt-3.1.2/lib/jwt/claims/decode_verifier.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/jwt-3.1.2/lib/jwt/claims/expiration.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/jwt-3.1.2/lib/jwt/claims/issued_at.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/jwt-3.1.2/lib/jwt/claims/issuer.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/jwt-3.1.2/lib/jwt/claims/jwt_id.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/jwt-3.1.2/lib/jwt/claims/not_before.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/jwt-3.1.2/lib/jwt/claims/numeric.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/jwt-3.1.2/lib/jwt/claims/required.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/jwt-3.1.2/lib/jwt/claims/subject.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/jwt-3.1.2/lib/jwt/claims/verifier.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/jwt-3.1.2/lib/jwt/configuration.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/jwt-3.1.2/lib/jwt/configuration/container.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/jwt-3.1.2/lib/jwt/configuration/decode_configuration.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/jwt-3.1.2/lib/jwt/configuration/jwk_configuration.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/jwt-3.1.2/lib/jwt/decode.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/jwt-3.1.2/lib/jwt/encode.rb create mode 100644 
vendor/bundle/ruby/3.2.0/gems/simplecov-0.22.0/lib/simplecov/formatter/simple_formatter.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/simplecov-0.22.0/lib/simplecov/last_run.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/simplecov-0.22.0/lib/simplecov/lines_classifier.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/simplecov-0.22.0/lib/simplecov/load_global_config.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/simplecov-0.22.0/lib/simplecov/no_defaults.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/simplecov-0.22.0/lib/simplecov/process.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/simplecov-0.22.0/lib/simplecov/profiles.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/simplecov-0.22.0/lib/simplecov/profiles/bundler_filter.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/simplecov-0.22.0/lib/simplecov/profiles/hidden_filter.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/simplecov-0.22.0/lib/simplecov/profiles/rails.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/simplecov-0.22.0/lib/simplecov/profiles/root_filter.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/simplecov-0.22.0/lib/simplecov/profiles/test_frameworks.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/simplecov-0.22.0/lib/simplecov/result.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/simplecov-0.22.0/lib/simplecov/result_adapter.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/simplecov-0.22.0/lib/simplecov/result_merger.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/simplecov-0.22.0/lib/simplecov/simulate_coverage.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/simplecov-0.22.0/lib/simplecov/source_file.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/simplecov-0.22.0/lib/simplecov/source_file/branch.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/simplecov-0.22.0/lib/simplecov/source_file/line.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/simplecov-0.22.0/lib/simplecov/useless_results_remover.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/simplecov-0.22.0/lib/simplecov/version.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/simplecov-html-0.13.2/.document create mode 100644 vendor/bundle/ruby/3.2.0/gems/simplecov-html-0.13.2/CHANGELOG.md create mode 100644 vendor/bundle/ruby/3.2.0/gems/simplecov-html-0.13.2/LICENSE create mode 100644 vendor/bundle/ruby/3.2.0/gems/simplecov-html-0.13.2/README.md create mode 100644 vendor/bundle/ruby/3.2.0/gems/simplecov-html-0.13.2/Rakefile create mode 100644 vendor/bundle/ruby/3.2.0/gems/simplecov-html-0.13.2/lib/simplecov-html.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/simplecov-html-0.13.2/lib/simplecov-html/version.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/simplecov-html-0.13.2/public/DataTables-1.10.20/images/sort_asc.png create mode 100644 vendor/bundle/ruby/3.2.0/gems/simplecov-html-0.13.2/public/DataTables-1.10.20/images/sort_asc_disabled.png create mode 100644 vendor/bundle/ruby/3.2.0/gems/simplecov-html-0.13.2/public/DataTables-1.10.20/images/sort_both.png create mode 100644 vendor/bundle/ruby/3.2.0/gems/simplecov-html-0.13.2/public/DataTables-1.10.20/images/sort_desc.png create mode 100644 vendor/bundle/ruby/3.2.0/gems/simplecov-html-0.13.2/public/DataTables-1.10.20/images/sort_desc_disabled.png create mode 100644 vendor/bundle/ruby/3.2.0/gems/simplecov-html-0.13.2/public/application.css create mode 100644 vendor/bundle/ruby/3.2.0/gems/simplecov-html-0.13.2/public/application.js create mode 100644 
vendor/bundle/ruby/3.2.0/gems/simplecov-html-0.13.2/public/colorbox/border.png create mode 100644 vendor/bundle/ruby/3.2.0/gems/simplecov-html-0.13.2/public/colorbox/controls.png create mode 100644 vendor/bundle/ruby/3.2.0/gems/simplecov-html-0.13.2/public/colorbox/loading.gif create mode 100644 vendor/bundle/ruby/3.2.0/gems/simplecov-html-0.13.2/public/colorbox/loading_background.png create mode 100644 vendor/bundle/ruby/3.2.0/gems/simplecov-html-0.13.2/public/favicon_green.png create mode 100644 vendor/bundle/ruby/3.2.0/gems/simplecov-html-0.13.2/public/favicon_red.png create mode 100644 vendor/bundle/ruby/3.2.0/gems/simplecov-html-0.13.2/public/favicon_yellow.png create mode 100644 vendor/bundle/ruby/3.2.0/gems/simplecov-html-0.13.2/public/images/ui-bg_flat_0_aaaaaa_40x100.png create mode 100644 vendor/bundle/ruby/3.2.0/gems/simplecov-html-0.13.2/public/images/ui-bg_flat_75_ffffff_40x100.png create mode 100644 vendor/bundle/ruby/3.2.0/gems/simplecov-html-0.13.2/public/images/ui-bg_glass_55_fbf9ee_1x400.png create mode 100644 vendor/bundle/ruby/3.2.0/gems/simplecov-html-0.13.2/public/images/ui-bg_glass_65_ffffff_1x400.png create mode 100644 vendor/bundle/ruby/3.2.0/gems/simplecov-html-0.13.2/public/images/ui-bg_glass_75_dadada_1x400.png create mode 100644 vendor/bundle/ruby/3.2.0/gems/simplecov-html-0.13.2/public/images/ui-bg_glass_75_e6e6e6_1x400.png create mode 100644 vendor/bundle/ruby/3.2.0/gems/simplecov-html-0.13.2/public/images/ui-bg_glass_95_fef1ec_1x400.png create mode 100644 vendor/bundle/ruby/3.2.0/gems/simplecov-html-0.13.2/public/images/ui-bg_highlight-soft_75_cccccc_1x100.png create mode 100644 vendor/bundle/ruby/3.2.0/gems/simplecov-html-0.13.2/public/images/ui-icons_222222_256x240.png create mode 100644 vendor/bundle/ruby/3.2.0/gems/simplecov-html-0.13.2/public/images/ui-icons_2e83ff_256x240.png create mode 100644 vendor/bundle/ruby/3.2.0/gems/simplecov-html-0.13.2/public/images/ui-icons_454545_256x240.png create mode 100644 vendor/bundle/ruby/3.2.0/gems/simplecov-html-0.13.2/public/images/ui-icons_888888_256x240.png create mode 100644 vendor/bundle/ruby/3.2.0/gems/simplecov-html-0.13.2/public/images/ui-icons_cd0a0a_256x240.png create mode 100644 vendor/bundle/ruby/3.2.0/gems/simplecov-html-0.13.2/public/loading.gif create mode 100644 vendor/bundle/ruby/3.2.0/gems/simplecov-html-0.13.2/public/magnify.png create mode 100644 vendor/bundle/ruby/3.2.0/gems/simplecov-html-0.13.2/views/covered_percent.erb create mode 100644 vendor/bundle/ruby/3.2.0/gems/simplecov-html-0.13.2/views/file_list.erb create mode 100644 vendor/bundle/ruby/3.2.0/gems/simplecov-html-0.13.2/views/layout.erb create mode 100644 vendor/bundle/ruby/3.2.0/gems/simplecov-html-0.13.2/views/source_file.erb create mode 100644 vendor/bundle/ruby/3.2.0/gems/simplecov_json_formatter-0.1.4/CHANGELOG.md create mode 100644 vendor/bundle/ruby/3.2.0/gems/simplecov_json_formatter-0.1.4/README.md create mode 100644 vendor/bundle/ruby/3.2.0/gems/simplecov_json_formatter-0.1.4/lib/simplecov_json_formatter.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/simplecov_json_formatter-0.1.4/lib/simplecov_json_formatter/result_exporter.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/simplecov_json_formatter-0.1.4/lib/simplecov_json_formatter/result_hash_formatter.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/simplecov_json_formatter-0.1.4/lib/simplecov_json_formatter/source_file_formatter.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/simplecov_json_formatter-0.1.4/lib/simplecov_json_formatter/version.rb create 
mode 100644 vendor/bundle/ruby/3.2.0/gems/uri-1.0.3/.document create mode 100644 vendor/bundle/ruby/3.2.0/gems/uri-1.0.3/.rdoc_options create mode 100644 vendor/bundle/ruby/3.2.0/gems/uri-1.0.3/BSDL create mode 100644 vendor/bundle/ruby/3.2.0/gems/uri-1.0.3/COPYING create mode 100644 vendor/bundle/ruby/3.2.0/gems/uri-1.0.3/README.md create mode 100644 vendor/bundle/ruby/3.2.0/gems/uri-1.0.3/docs/kernel.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/uri-1.0.3/lib/uri.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/uri-1.0.3/lib/uri/common.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/uri-1.0.3/lib/uri/file.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/uri-1.0.3/lib/uri/ftp.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/uri-1.0.3/lib/uri/generic.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/uri-1.0.3/lib/uri/http.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/uri-1.0.3/lib/uri/https.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/uri-1.0.3/lib/uri/ldap.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/uri-1.0.3/lib/uri/ldaps.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/uri-1.0.3/lib/uri/mailto.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/uri-1.0.3/lib/uri/rfc2396_parser.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/uri-1.0.3/lib/uri/rfc3986_parser.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/uri-1.0.3/lib/uri/version.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/uri-1.0.3/lib/uri/ws.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/uri-1.0.3/lib/uri/wss.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/.yardopts create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/.yardopts_guide create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/.yardopts_i18n create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/CHANGELOG.md create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/LEGAL create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/LICENSE create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/README.md create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/docs/CodeObjects.md create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/docs/GettingStarted.md create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/docs/Handlers.md create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/docs/Overview.md create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/docs/Parser.md create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/docs/Tags.md create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/docs/TagsArch.md create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/docs/Templates.md create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/docs/WhatsNew.md create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/docs/images/code-objects-class-diagram.png create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/docs/images/handlers-class-diagram.png create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/docs/images/overview-class-diagram.png create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/docs/images/parser-class-diagram.png create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/docs/images/tags-class-diagram.png create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/docs/templates/default/fulldoc/html/full_list_tag.erb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/docs/templates/default/fulldoc/html/setup.rb create mode 100644 
vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/docs/templates/default/layout/html/setup.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/docs/templates/default/layout/html/tag_list.erb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/docs/templates/default/yard_tags/html/list.erb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/docs/templates/default/yard_tags/html/setup.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/docs/templates/plugin.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/lib/rubygems_plugin.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/lib/yard.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/lib/yard/autoload.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/lib/yard/cli/command.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/lib/yard/cli/command_parser.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/lib/yard/cli/config.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/lib/yard/cli/diff.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/lib/yard/cli/display.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/lib/yard/cli/gems.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/lib/yard/cli/graph.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/lib/yard/cli/help.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/lib/yard/cli/i18n.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/lib/yard/cli/list.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/lib/yard/cli/markup_types.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/lib/yard/cli/server.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/lib/yard/cli/stats.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/lib/yard/cli/yardoc.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/lib/yard/cli/yardopts_command.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/lib/yard/cli/yri.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/lib/yard/code_objects/base.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/lib/yard/code_objects/class_object.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/lib/yard/code_objects/class_variable_object.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/lib/yard/code_objects/constant_object.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/lib/yard/code_objects/extended_method_object.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/lib/yard/code_objects/extra_file_object.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/lib/yard/code_objects/macro_object.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/lib/yard/code_objects/method_object.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/lib/yard/code_objects/module_object.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/lib/yard/code_objects/namespace_mapper.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/lib/yard/code_objects/namespace_object.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/lib/yard/code_objects/proxy.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/lib/yard/code_objects/root_object.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/lib/yard/config.rb create mode 100644 
vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/lib/yard/core_ext/array.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/lib/yard/core_ext/file.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/lib/yard/core_ext/hash.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/lib/yard/core_ext/insertion.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/lib/yard/core_ext/module.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/lib/yard/core_ext/string.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/lib/yard/core_ext/symbol_hash.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/lib/yard/docstring.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/lib/yard/docstring_parser.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/lib/yard/gem_index.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/lib/yard/globals.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/lib/yard/handlers/base.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/lib/yard/handlers/c/alias_handler.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/lib/yard/handlers/c/attribute_handler.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/lib/yard/handlers/c/base.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/lib/yard/handlers/c/class_handler.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/lib/yard/handlers/c/constant_handler.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/lib/yard/handlers/c/handler_methods.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/lib/yard/handlers/c/init_handler.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/lib/yard/handlers/c/method_handler.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/lib/yard/handlers/c/mixin_handler.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/lib/yard/handlers/c/module_handler.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/lib/yard/handlers/c/override_comment_handler.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/lib/yard/handlers/c/path_handler.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/lib/yard/handlers/c/struct_handler.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/lib/yard/handlers/c/symbol_handler.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/lib/yard/handlers/common/method_handler.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/lib/yard/handlers/processor.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/lib/yard/handlers/ruby/alias_handler.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/lib/yard/handlers/ruby/attribute_handler.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/lib/yard/handlers/ruby/base.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/lib/yard/handlers/ruby/class_condition_handler.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/lib/yard/handlers/ruby/class_handler.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/lib/yard/handlers/ruby/class_variable_handler.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/lib/yard/handlers/ruby/comment_handler.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/lib/yard/handlers/ruby/constant_handler.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/lib/yard/handlers/ruby/decorator_handler_methods.rb 
create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/lib/yard/handlers/ruby/dsl_handler.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/lib/yard/handlers/ruby/dsl_handler_methods.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/lib/yard/handlers/ruby/exception_handler.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/lib/yard/handlers/ruby/extend_handler.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/lib/yard/handlers/ruby/legacy/alias_handler.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/lib/yard/handlers/ruby/legacy/attribute_handler.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/lib/yard/handlers/ruby/legacy/base.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/lib/yard/handlers/ruby/legacy/class_condition_handler.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/lib/yard/handlers/ruby/legacy/class_handler.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/lib/yard/handlers/ruby/legacy/class_variable_handler.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/lib/yard/handlers/ruby/legacy/comment_handler.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/lib/yard/handlers/ruby/legacy/constant_handler.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/lib/yard/handlers/ruby/legacy/dsl_handler.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/lib/yard/handlers/ruby/legacy/exception_handler.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/lib/yard/handlers/ruby/legacy/extend_handler.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/lib/yard/handlers/ruby/legacy/method_handler.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/lib/yard/handlers/ruby/legacy/mixin_handler.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/lib/yard/handlers/ruby/legacy/module_function_handler.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/lib/yard/handlers/ruby/legacy/module_handler.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/lib/yard/handlers/ruby/legacy/private_class_method_handler.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/lib/yard/handlers/ruby/legacy/private_constant_handler.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/lib/yard/handlers/ruby/legacy/visibility_handler.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/lib/yard/handlers/ruby/legacy/yield_handler.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/lib/yard/handlers/ruby/method_condition_handler.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/lib/yard/handlers/ruby/method_handler.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/lib/yard/handlers/ruby/mixin_handler.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/lib/yard/handlers/ruby/module_function_handler.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/lib/yard/handlers/ruby/module_handler.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/lib/yard/handlers/ruby/private_class_method_handler.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/lib/yard/handlers/ruby/private_constant_handler.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/lib/yard/handlers/ruby/public_class_method_handler.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/lib/yard/handlers/ruby/struct_handler_methods.rb create mode 100644 
vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/lib/yard/handlers/ruby/visibility_handler.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/lib/yard/handlers/ruby/yield_handler.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/lib/yard/i18n/locale.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/lib/yard/i18n/message.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/lib/yard/i18n/messages.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/lib/yard/i18n/po_parser.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/lib/yard/i18n/pot_generator.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/lib/yard/i18n/text.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/lib/yard/logging.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/lib/yard/open_struct.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/lib/yard/options.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/lib/yard/parser/base.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/lib/yard/parser/c/c_parser.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/lib/yard/parser/c/comment_parser.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/lib/yard/parser/c/statement.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/lib/yard/parser/ruby/ast_node.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/lib/yard/parser/ruby/legacy/irb/slex.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/lib/yard/parser/ruby/legacy/ruby_lex.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/lib/yard/parser/ruby/legacy/ruby_parser.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/lib/yard/parser/ruby/legacy/statement.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/lib/yard/parser/ruby/legacy/statement_list.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/lib/yard/parser/ruby/legacy/token_list.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/lib/yard/parser/ruby/ruby_parser.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/lib/yard/parser/ruby/token_resolver.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/lib/yard/parser/source_parser.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/lib/yard/rake/yardoc_task.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/lib/yard/registry.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/lib/yard/registry_resolver.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/lib/yard/registry_store.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/lib/yard/rubygems/backports.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/lib/yard/rubygems/backports/LICENSE.txt create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/lib/yard/rubygems/backports/MIT.txt create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/lib/yard/rubygems/backports/gem.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/lib/yard/rubygems/backports/source_index.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/lib/yard/rubygems/doc_manager.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/lib/yard/rubygems/hook.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/lib/yard/rubygems/specification.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/lib/yard/serializers/base.rb create mode 100644 
vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/lib/yard/serializers/file_system_serializer.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/lib/yard/serializers/process_serializer.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/lib/yard/serializers/stdout_serializer.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/lib/yard/serializers/yardoc_serializer.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/lib/yard/server.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/lib/yard/server/adapter.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/lib/yard/server/commands/base.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/lib/yard/server/commands/display_file_command.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/lib/yard/server/commands/display_object_command.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/lib/yard/server/commands/frames_command.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/lib/yard/server/commands/library_command.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/lib/yard/server/commands/library_index_command.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/lib/yard/server/commands/list_command.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/lib/yard/server/commands/root_request_command.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/lib/yard/server/commands/search_command.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/lib/yard/server/commands/static_file_command.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/lib/yard/server/commands/static_file_helpers.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/lib/yard/server/doc_server_helper.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/lib/yard/server/doc_server_serializer.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/lib/yard/server/http_utils.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/lib/yard/server/library_version.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/lib/yard/server/rack_adapter.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/lib/yard/server/router.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/lib/yard/server/static_caching.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/lib/yard/server/templates/default/fulldoc/html/css/custom.css create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/lib/yard/server/templates/default/fulldoc/html/images/processing.gif create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/lib/yard/server/templates/default/fulldoc/html/js/autocomplete.js create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/lib/yard/server/templates/default/layout/html/breadcrumb.erb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/lib/yard/server/templates/default/layout/html/script_setup.erb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/lib/yard/server/templates/default/layout/html/setup.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/lib/yard/server/templates/default/method_details/html/permalink.erb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/lib/yard/server/templates/default/method_details/html/setup.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/lib/yard/server/templates/doc_server/library_list/html/headers.erb create mode 100644 
vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/lib/yard/server/templates/doc_server/library_list/html/library_list.erb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/lib/yard/server/templates/doc_server/library_list/html/listing.erb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/lib/yard/server/templates/doc_server/library_list/html/setup.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/lib/yard/server/templates/doc_server/library_list/html/title.erb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/lib/yard/server/templates/doc_server/processing/html/processing.erb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/lib/yard/server/templates/doc_server/processing/html/setup.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/lib/yard/server/templates/doc_server/search/html/search.erb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/lib/yard/server/templates/doc_server/search/html/setup.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/lib/yard/server/webrick_adapter.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/lib/yard/tags/default_factory.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/lib/yard/tags/default_tag.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/lib/yard/tags/directives.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/lib/yard/tags/library.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/lib/yard/tags/option_tag.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/lib/yard/tags/overload_tag.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/lib/yard/tags/ref_tag.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/lib/yard/tags/ref_tag_list.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/lib/yard/tags/tag.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/lib/yard/tags/tag_format_error.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/lib/yard/tags/types_explainer.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/lib/yard/templates/engine.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/lib/yard/templates/erb_cache.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/lib/yard/templates/helpers/base_helper.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/lib/yard/templates/helpers/filter_helper.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/lib/yard/templates/helpers/html_helper.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/lib/yard/templates/helpers/html_syntax_highlight_helper.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/lib/yard/templates/helpers/markup/rdoc_markdown.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/lib/yard/templates/helpers/markup/rdoc_markup.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/lib/yard/templates/helpers/markup_helper.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/lib/yard/templates/helpers/method_helper.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/lib/yard/templates/helpers/module_helper.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/lib/yard/templates/helpers/text_helper.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/lib/yard/templates/helpers/uml_helper.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/lib/yard/templates/section.rb create mode 100644 
vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/lib/yard/templates/template.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/lib/yard/templates/template_options.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/lib/yard/verifier.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/lib/yard/version.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/po/ja.po create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/templates/default/class/dot/setup.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/templates/default/class/dot/superklass.erb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/templates/default/class/html/constructor_details.erb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/templates/default/class/html/setup.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/templates/default/class/html/subclasses.erb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/templates/default/class/setup.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/templates/default/class/text/setup.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/templates/default/class/text/subclasses.erb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/templates/default/constant/text/header.erb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/templates/default/constant/text/setup.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/templates/default/docstring/html/abstract.erb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/templates/default/docstring/html/deprecated.erb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/templates/default/docstring/html/index.erb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/templates/default/docstring/html/note.erb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/templates/default/docstring/html/private.erb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/templates/default/docstring/html/returns_void.erb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/templates/default/docstring/html/text.erb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/templates/default/docstring/html/todo.erb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/templates/default/docstring/setup.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/templates/default/docstring/text/abstract.erb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/templates/default/docstring/text/deprecated.erb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/templates/default/docstring/text/index.erb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/templates/default/docstring/text/note.erb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/templates/default/docstring/text/private.erb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/templates/default/docstring/text/returns_void.erb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/templates/default/docstring/text/text.erb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/templates/default/docstring/text/todo.erb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/templates/default/fulldoc/html/css/common.css create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/templates/default/fulldoc/html/css/full_list.css create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/templates/default/fulldoc/html/css/style.css create mode 
100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/templates/default/fulldoc/html/frames.erb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/templates/default/fulldoc/html/full_list.erb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/templates/default/fulldoc/html/full_list_class.erb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/templates/default/fulldoc/html/full_list_file.erb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/templates/default/fulldoc/html/full_list_method.erb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/templates/default/fulldoc/html/js/app.js create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/templates/default/fulldoc/html/js/full_list.js create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/templates/default/fulldoc/html/js/jquery.js create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/templates/default/fulldoc/html/setup.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/templates/default/layout/dot/header.erb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/templates/default/layout/dot/setup.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/templates/default/layout/html/breadcrumb.erb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/templates/default/layout/html/files.erb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/templates/default/layout/html/footer.erb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/templates/default/layout/html/headers.erb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/templates/default/layout/html/index.erb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/templates/default/layout/html/layout.erb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/templates/default/layout/html/listing.erb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/templates/default/layout/html/objects.erb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/templates/default/layout/html/script_setup.erb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/templates/default/layout/html/search.erb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/templates/default/layout/html/setup.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/templates/default/method/html/header.erb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/templates/default/method/setup.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/templates/default/method/text/header.erb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/templates/default/method_details/html/header.erb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/templates/default/method_details/html/method_signature.erb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/templates/default/method_details/html/source.erb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/templates/default/method_details/setup.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/templates/default/method_details/text/header.erb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/templates/default/method_details/text/method_signature.erb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/templates/default/method_details/text/setup.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/templates/default/module/dot/child.erb create mode 100644 
vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/templates/default/module/dot/dependencies.erb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/templates/default/module/dot/header.erb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/templates/default/module/dot/info.erb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/templates/default/module/dot/setup.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/templates/default/module/html/attribute_details.erb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/templates/default/module/html/attribute_summary.erb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/templates/default/module/html/box_info.erb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/templates/default/module/html/children.erb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/templates/default/module/html/constant_summary.erb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/templates/default/module/html/defines.erb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/templates/default/module/html/header.erb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/templates/default/module/html/inherited_attributes.erb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/templates/default/module/html/inherited_constants.erb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/templates/default/module/html/inherited_methods.erb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/templates/default/module/html/item_summary.erb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/templates/default/module/html/method_details_list.erb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/templates/default/module/html/method_summary.erb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/templates/default/module/html/methodmissing.erb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/templates/default/module/html/pre_docstring.erb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/templates/default/module/setup.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/templates/default/module/text/children.erb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/templates/default/module/text/class_meths_list.erb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/templates/default/module/text/extends.erb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/templates/default/module/text/header.erb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/templates/default/module/text/includes.erb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/templates/default/module/text/instance_meths_list.erb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/templates/default/module/text/setup.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/templates/default/onefile/html/files.erb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/templates/default/onefile/html/headers.erb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/templates/default/onefile/html/layout.erb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/templates/default/onefile/html/readme.erb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/templates/default/onefile/html/setup.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/templates/default/root/dot/child.erb create mode 100644 
vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/templates/default/root/dot/setup.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/templates/default/root/html/setup.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/templates/default/tags/html/example.erb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/templates/default/tags/html/index.erb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/templates/default/tags/html/option.erb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/templates/default/tags/html/overload.erb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/templates/default/tags/html/see.erb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/templates/default/tags/html/tag.erb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/templates/default/tags/setup.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/templates/default/tags/text/example.erb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/templates/default/tags/text/index.erb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/templates/default/tags/text/option.erb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/templates/default/tags/text/overload.erb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/templates/default/tags/text/see.erb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/templates/default/tags/text/tag.erb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/templates/guide/class/html/setup.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/templates/guide/docstring/html/setup.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/templates/guide/fulldoc/html/css/style.css create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/templates/guide/fulldoc/html/js/app.js create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/templates/guide/fulldoc/html/setup.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/templates/guide/layout/html/layout.erb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/templates/guide/layout/html/setup.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/templates/guide/method/html/header.erb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/templates/guide/method/html/setup.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/templates/guide/module/html/header.erb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/templates/guide/module/html/method_list.erb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/templates/guide/module/html/setup.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/templates/guide/onefile/html/files.erb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/templates/guide/onefile/html/setup.rb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/templates/guide/onefile/html/toc.erb create mode 100644 vendor/bundle/ruby/3.2.0/gems/yard-0.9.37/templates/guide/tags/html/setup.rb create mode 100644 vendor/bundle/ruby/3.2.0/plugins/yard_plugin.rb create mode 100644 vendor/bundle/ruby/3.2.0/specifications/base64-0.3.0.gemspec create mode 100644 vendor/bundle/ruby/3.2.0/specifications/benchmark-0.4.1.gemspec create mode 100644 vendor/bundle/ruby/3.2.0/specifications/diff-lcs-1.6.2.gemspec create mode 100644 vendor/bundle/ruby/3.2.0/specifications/docile-1.4.1.gemspec create mode 100644 vendor/bundle/ruby/3.2.0/specifications/equivalent-xml-0.6.0.gemspec create mode 100644 
vendor/bundle/ruby/3.2.0/specifications/fakeweb-1.3.0.gemspec
 create mode 100644 vendor/bundle/ruby/3.2.0/specifications/faraday-2.13.4.gemspec
 create mode 100644 vendor/bundle/ruby/3.2.0/specifications/faraday-net_http-3.4.1.gemspec
 create mode 100644 vendor/bundle/ruby/3.2.0/specifications/json-2.13.2.gemspec
 create mode 100644 vendor/bundle/ruby/3.2.0/specifications/jwt-3.1.2.gemspec
 create mode 100644 vendor/bundle/ruby/3.2.0/specifications/logger-1.4.4.gemspec
 create mode 100644 vendor/bundle/ruby/3.2.0/specifications/net-http-0.6.0.gemspec
 create mode 100644 vendor/bundle/ruby/3.2.0/specifications/nokogiri-1.18.9-x86_64-linux-gnu.gemspec
 create mode 100644 vendor/bundle/ruby/3.2.0/specifications/ostruct-0.6.3.gemspec
 create mode 100644 vendor/bundle/ruby/3.2.0/specifications/racc-1.8.1.gemspec
 create mode 100644 vendor/bundle/ruby/3.2.0/specifications/rack-2.2.17.gemspec
 create mode 100644 vendor/bundle/ruby/3.2.0/specifications/rake-13.3.0.gemspec
 create mode 100644 vendor/bundle/ruby/3.2.0/specifications/rexml-3.4.4.gemspec
 create mode 100644 vendor/bundle/ruby/3.2.0/specifications/rspec-3.13.1.gemspec
 create mode 100644 vendor/bundle/ruby/3.2.0/specifications/rspec-core-3.13.5.gemspec
 create mode 100644 vendor/bundle/ruby/3.2.0/specifications/rspec-expectations-3.13.5.gemspec
 create mode 100644 vendor/bundle/ruby/3.2.0/specifications/rspec-mocks-3.13.5.gemspec
 create mode 100644 vendor/bundle/ruby/3.2.0/specifications/rspec-support-3.13.5.gemspec
 create mode 100644 vendor/bundle/ruby/3.2.0/specifications/simplecov-0.22.0.gemspec
 create mode 100644 vendor/bundle/ruby/3.2.0/specifications/simplecov-html-0.13.2.gemspec
 create mode 100644 vendor/bundle/ruby/3.2.0/specifications/simplecov_json_formatter-0.1.4.gemspec
 create mode 100644 vendor/bundle/ruby/3.2.0/specifications/uri-1.0.3.gemspec
 create mode 100644 vendor/bundle/ruby/3.2.0/specifications/yard-0.9.37.gemspec
diff --git a/vendor/bundle/ruby/3.2.0/extensions/x86_64-linux-gnu/3.2.0/json-2.13.2/gem.build_complete b/vendor/bundle/ruby/3.2.0/extensions/x86_64-linux-gnu/3.2.0/json-2.13.2/gem.build_complete
new file mode 100644
index 000000000..e69de29bb
diff --git a/vendor/bundle/ruby/3.2.0/extensions/x86_64-linux-gnu/3.2.0/json-2.13.2/gem_make.out b/vendor/bundle/ruby/3.2.0/extensions/x86_64-linux-gnu/3.2.0/json-2.13.2/gem_make.out
new file mode 100644
index 000000000..5747e06c9
--- /dev/null
+++ b/vendor/bundle/ruby/3.2.0/extensions/x86_64-linux-gnu/3.2.0/json-2.13.2/gem_make.out
@@ -0,0 +1,25 @@
+current directory: /home/runner/work/twilio-ruby/twilio-ruby/vendor/bundle/ruby/3.2.0/gems/json-2.13.2/ext/json/ext/parser
+/usr/bin/ruby3.2 -I/usr/lib/ruby/vendor_ruby extconf.rb
+checking for rb_enc_interned_str() in ruby/encoding.h... yes
+checking for rb_hash_new_capa() in ruby.h... yes
+checking for rb_hash_bulk_insert() in ruby.h... yes
+checking for strnlen() in string.h... yes
+checking for whether -std=c99 is accepted as CFLAGS... yes
+checking for x86intrin.h... yes
+checking for cpuid.h... yes
+creating Makefile
+
+current directory: /home/runner/work/twilio-ruby/twilio-ruby/vendor/bundle/ruby/3.2.0/gems/json-2.13.2/ext/json/ext/parser
+make DESTDIR\= sitearchdir\=./.gem.20250912-3428-6z2upz sitelibdir\=./.gem.20250912-3428-6z2upz clean
+
+current directory: /home/runner/work/twilio-ruby/twilio-ruby/vendor/bundle/ruby/3.2.0/gems/json-2.13.2/ext/json/ext/parser
+make DESTDIR\= sitearchdir\=./.gem.20250912-3428-6z2upz sitelibdir\=./.gem.20250912-3428-6z2upz
+compiling parser.c
+linking shared-object json/ext/parser.so
+
+current directory: /home/runner/work/twilio-ruby/twilio-ruby/vendor/bundle/ruby/3.2.0/gems/json-2.13.2/ext/json/ext/parser
+make DESTDIR\= sitearchdir\=./.gem.20250912-3428-6z2upz sitelibdir\=./.gem.20250912-3428-6z2upz install
+/usr/bin/install -c -m 0755 parser.so ./.gem.20250912-3428-6z2upz/json/ext
+
+current directory: /home/runner/work/twilio-ruby/twilio-ruby/vendor/bundle/ruby/3.2.0/gems/json-2.13.2/ext/json/ext/parser
+make DESTDIR\= sitearchdir\=./.gem.20250912-3428-6z2upz sitelibdir\=./.gem.20250912-3428-6z2upz clean
diff --git a/vendor/bundle/ruby/3.2.0/extensions/x86_64-linux-gnu/3.2.0/json-2.13.2/json/ext/generator.so b/vendor/bundle/ruby/3.2.0/extensions/x86_64-linux-gnu/3.2.0/json-2.13.2/json/ext/generator.so
new file mode 100755
index 0000000000000000000000000000000000000000..595371e35fc718f7457cebc811af7fac37f59ff1
GIT binary patch
literal 220496
[base85-encoded binary data for json/ext/generator.so omitted]
zHP{q6i;eO$-%@(n*fmrQ$-QHM>%(I2?s5=H8LfX@-;T%2fA-t>U5{;BjC0 zQS{A=L}hv7)4VeUz+^X92T6XVe)bj!%{gA1SBQnnG*%S6%KWdmu2q4*vw4{TnRk)`4YI{%Y~VZK2l)8cS77^BP{I+yfO z{%qMSKSld*QrwsFH|MZdI@d)^drnAk)z&?AR$nGM-cLzq$$c8vom4>a>0b=G~=|e9mUL%qFHcqfY5yFm4~af>BixZOz{5$~V4i80hQJm2H2=9q^6D zO=od^o#)@sxHr&;0OBgZ^Ilr=j}~#nPD`1=ecm80 zq5rOp-SB7M%V3UXVQT@)C!h#v26bRn)OB{xcJajupRL861It>qabLi_hqWN%v}|ys zl()U3r*0Gu%Q~m|M#WFrz9PM>ZGx12#<0VEp@q1(jxm1r#m=2O=?GqY_e-$e>YEA9 zdP`Y9&M9T@z}M$M&f7jIH#fJ!-%+YKe@onXUn|+LJ#L4)GTs8A-tkKfTjT?3JLV=@ z|0!3y;taMp+(&5;AN>gwygaJX+qb-)Ep%Kq2Gy?l)n=^eg$-EI3+pMSuE*@3t)tz4 zT$tN}G+wd`iPPm%mUBPAvWtI%{!;dLHs93uZ1R_=K)L{P+q>xQv-eqjjvIxlsMzk3 zZ%(X1F#&7AW~_nl=`nxeUO<~n);&igFKcmW%qMKg<)XVMQ~10TS7D=f!EXg;Wtm&db8)~HFGvcEP3ZT~EkJiu0=Uv@lkFRXW`^881 z(Aq}+61Q#w;|;45+Qm^P;ZN>TpT&>C!_eL8dn7{g&3zy5eUuDcjc?73N4c?xF=3vV z@qSv_we(pn!ue(zaQcU;j<97PLNyP|-mQMkjOcfK(OI1`tJSggIR2px_`@`f> z5f-^}xOZIphT-zKYa537MqOofA8D|~?L%J?caEBKLv-;fg!l%KdKirVauiLEgQST8FDRhTZ3<4gt`pHSsxD{Nsl{j_05d1$-|a| zXc(4V&m>UKoueg2e@K}O_036$P()cuw=nxd1P#eDa7p&51vbp>-S^NtN+$p+dcLS3}UJ z5QOOpX3u5VrB*&93VI8Srx=qkiY=${S^W<7(4A;44X#JDHErPC63=B5tYQiUn*1YK1O$c%m8m z2@Iq?p(Fp!rLWzcRPgctBAdsw9*v=a-abx^wkBh}LhB%Qw~XeD$_%_vY<170PxuX^ zoX~qiB637e&W_3GKYi79>hIEyM81GU`H_fwUNlRB6cBI_DKSFU*w}fkG8GF5i#t6U zr)hS)nWLMLY5fD;i*g@6&6hZRG5Fy+P$#jp8)7IWRK1mAu1l>%&vl5=cK7x-Sefp+ zmz7Dq9q5q1TD?)7g4fFXlVQ9uCgP;M0V;bbTa|bdF9JzZB}jz7l`AE1Ff@ML{B1#t z5YIm~qsqf-l>M+Gyr70|(|^w5?Q%w;oH(4e$_H$|n2qT0VgB_Lc<&lX!w%Nzc)14N z@+}01^!u}MuVNM(iZe(_mJsCJ}Zk9JwEsQamOv_S#{4GPJ%&I9aIV6>id zH(+t>p$_a>NLk9%qBHiQXyrX5R7Gk=9r-;^a;BmCNE(hFJ|{JF{3gXk{4W9x{*d<# zIRormhE0R_!Ww91ymWqC+GSpd(y0fK5tdcxL9pYfZPh#o9Q<0nmGWZ!zrMZUtugHo zS=$Nd!VitCsBye4Dg~ygBc2F>qsV5qf0w zl%$LIibcvGEI$MtuNCR=YS_O~aqz;6u0d<#A=jTZq1{wt@}a<9=gT5Gq)U4PhM{#^ zV+k}bfU;(_JDPN0UK(a?$23i?KwGgwttKs9TE$gRjPNA&BLduW*9+p}jfm1@_$o?@ zi@4Nw;JiPOcD^y4aC639>)RVe>Rz1_XXwL8j$l8lQ2Rle4h-1E*_t!p5l%U4Bhhm>@I3?QJ2yyE z!+=LQ@F4@79C(odvp66FaGIraRal>*!O-5D1$PK z6HaTlSI#$yy>3V20MCh)bBOwGn53cXi<|6?8VrNYTcQSh?V=GV2VdsJrTx2rvb6t3 zg~Dq#q9=z$(KbBw<;q;JtD}gH+w)!EErL1_P^f({f7KS~*wn&l^~xk)rHCZOo6<0Z%9p5O-vEie1*Ck$5+#1iP}R!oT*kY^h&jVx4pxDx3?-4iltqkW z^dV5XaPlT1cO^20KVRaM7$!cO7`?_ZRtikYrWYxBqVY3gt48)jS8_>$H|2iB+1Ght z{n5nXOPobQCvm-#K*r`7jwXoI?pkD}bl{{L8R-b9Fx%=)AZ#JacvGTuB5QH63?frV zWLuu&F{l;DF^ldp;i3GMq~pMuYZGi_hHDVVV+uTv>-KS-B)&vuqN zf>J-mQr~#Fg_e3gOZ@{n8dFnh}>drj%ZlqAGoIC_%0coJur$p<^81Rai9DG;a};G}OD=`7?Q zi$;P+xh=Oa>`7n`5;mV>-)7h&!0rVWd2EGODkDJDRJx4p%fO_}=ECP7&PYhIjwk6$ zYK`OM@gN8MFqzKYe2L2`yA)2ohoy$TzpQ7pQlj0=X}1JuFLT-qq8T}@nNI8gPR=3n z@#mQ_P7Km2yZ&P)b~Mp;aN6gf1vGkwlT(TO7RXpJP@J`lSHWkKcR{yN;8GUx)IMEm zm5y2P#S%(Am8CX{fz)=Ex(caZ1(!0Mryik8ednhv^>59Qx(6r6vqZsu;2XH2(w39j zF%s1aeLY^;%+27(baMlC7peC^R%PW>gwDr^qZOrN{ncgy-=+k<#4U)k?rDqiJTQSC z=iSeckPersl~vCL(!EaUoI0)z&`0B%7v%B~*9aZgKr~y8Yiy9qPF!(1uDKzuome4_*l{fXn z%^>e>kjRZs!;HVujwu=d;h*A303IMPSC{R-K4EOt!1EqI3(F)W2MM^{>B@zYND0ub zLtwi%aa&Qd%soM_7l}*MaeWx#iVbqPiAzPP0D$_wxu3{+20!atbB@E(+LHi4(OocTt^F@aFuanv6{KP!b<;?RjYuPibhSpho|6iAs;)#j#|&Pr_$lQ6JW*#YCm)zi}*~M{DQ@DfFdW&2>y9Qj$hWWhr1d9C zHPc9~S!mo(ILc2pZl&nprzrXnMf-7|8HC0s8($*KNro}LiY(yVUBAM2cv4%La9hzp zTTqxEBa;F!Vll}-p~*j^FYvqjv`0gfEr9>dGw==}HLqY=R(@ViX5qw#?b!|?ccL>l z*Is;=d4$tpF3dGgEOZv+n5P%#Iqbqzdr3*wB)i}&$jZ+z%+7MybIe0Vj~IT}U85ZU z1V>?JQE_3mFp~ZhIP>!bN3qi`%v!K{sVlSSC#9VBFy~L4MFo|;OoS$RP zD=|9?3(Yxpdyz1Et}yNvVca-jTn}N~L}6U6Fs?utS1gQk2=NI$d-YE2(>EzOYhree z9TaEJ_@2o;ND?~d=e0B!6gteL>U89lSHN^7B};_Sj$-5}JUCu>xTLV4M|R=Vsf7i? 
zpyI4aQ}I6;26uP$ijEx`Kg zRWi8|(7zD`aj-62i3bqEz!V{SvORlBW_Dh2wlhDgxWB+^CbJ-`Ko=#gv-9#Y3k&jR z0v(K}2s!p5$7KFDMc}@asTnjg-(D~Y#4BP`gh{+6?ZF6)Fg;Yi!|aa9g*hd{2;KnT zAIxeJmC{016|4mg`y_j@Fetw;i>fO--8ZUpbPil?Nt~v*>0vHm?zn%nwj{LJV4X4O3WxK%4nXR=a_7E zfUU%llb<)yTpGLthC;u8`QdM5Nr=$(+5&?lj9LQ+C98gxR> zo;`c@?Aqna-aA<34MC@>D8xqpTs_W`tY|4;qS{1;>$K$>3I$ov?;m+@PT zEmAK+rxqvpf9jX{Uyx}HqsPpFQ)% zmkU?kpuxx1+|hpdRlQFqYVhI@67RS;Dd~fe8hm&Es@;e7)|Ap~7L1_1{kNGhV{_knm9Fd%|82i+`QgciBfp*2 z;Po#5zgEBf(MMNZ=5+6nIr+xSiH~*G;L-6Fy(joLZSSwa8Mkfx=OWMWrytPZPv^xI zb(((ukVAu)tUtE?rB4sNzC?qU&2^N0nfYeJn;QJ(^Q)IUS(LV6s|HVLCw|s&Ci?M1 z8oVwc!Jg6iy)Wpji1b}Ee|Bxvk^axN#oiLZ&F;SQj;*F~N4snAo+nE3p6%4>Usern zy+d%H{phhjvNgEw`ZiDGL?7EUPlI=sjVx@jbJHR${+Yb4_Un#}ZU6CCA8Ih=0qd?d_sd$#y?%NZFFMI z8V&w%7*TPnj#9{q&_lKm4e{ofjp4*Sqrh*Dh)BxR;i{TKrs9{k7Pg zp=Z}-bYF*wU#!(&Q`)LGY_@5i)M@bbD^k~;Ui_A~ znO31)OYeW(bh=}$S%c#e{;3{YWqST@4Su23ikB}wZ#t0?p#OgN#Ba=9))Z;5=gT){ zJv21U<<;PB->->GJ$mzJuW9fSf7(MmR!mx4uECb|i&nhQzSrJwHTcaNep>s)ch|l6 zZw>xt`k{>lpFLeCVnd3aDW}dne|N-V@7|%o;p6GeVb8d*XKdI5x29)4mo5mi+rBec zofT!_@Zo`+e0u)nnJPlBwkr;toYVc>^jPZr0)Aq!h;A&n%&bAuMP(ZXxA z5Vw{X{aP)&P77nTklGfd?}%T_GQa=n#h6p-ezSoim}nG#89$2adBWqbJ{h7<)_4y9 zw+%mnDIT7NV8RFZ0`!0^3hDO&ek3PA*N4X-n`rxV`UTSW0xpoA;`(&@_?9r5S%U+i zo;N%m$V-o3tCM#Dy3tQh*T;O-uModC zHQK=^@Lil6BJYlegWKWXU1wnz9Pt$xuxO)y4Z=kTTe&bF5!Y>S5aGQv)1sIMbjif{wMax~b@2u*0P z-yocf@Hd2OsR5T_JT&2b2!taMUWITqLXq%j&>axk5N?8PS3%F8m*XG`;U$EZ5O%P@3=_Q0+5cXSvx<#0ea5BO*2+I&2 zM7SDZs~3jM#7}F%sb%g!u^9Af#(32NA9!I<745Bs{`12rnUwhd+#a$L}A5 za1+8A2s^xsd=RccSc5SCJ-@#J;m-(D;V=2`!;T0SZ9x4YJcw{F!VVk3hj1Li4)CWH z2vZRrM3{qc(I@aXgmXTHoe9R_WIX(9CoUC~Aw0O#@2^ETZXe16|JwNt%8l^SLFj>S z%^}EzpW$*bdknMg6$~>>h8tQnkEYk5Isr@9#SctFI{1;$+BB$j>=3c#^k|pR|N7)R zdUd*m5>om!{LHiY|0P5?dI*pAn1op1Cs2CrA-N9xOlYt4Aw(g)wP|6*z?f#~zQ{#U zZezsOtDu3UC5>4t@JpKS_rncQam`F_W z?#P&CR)_;g^q=t~`Ur%aF2yvP!}IF`6n)&Q3)P_`~7d}c_4%2F9&S} zXrZz|{-97<)@yVNr485+X{UpQ(jNeQ=U=36(C9tG(ofdX$6`|QGtxIIpDzC-&@X}B zRQj=?cfg5XQ|V`b-WBv)gZVcu-*V9D46!Ns>p>s)7xE8)p8pr=8$e(57wKc+n=3$X z%DzdU?*zT6`WdUy@#QJ4enRCV|DB=H(M7SxjK4HK)j;M%!szNWVr;11-vduuj0Ho& zc;sF@x%-lD{0^R_;OPC4IB>1lldlC%Nz1 zV$hWfX8htoD@9s-nlDtY5hQy+%Mtk773eYGSqYv^K_0ZD0gEEj+))v!oHb%1h3PF> zTPg*AJjOFS@!wBvz=OlWohWk3W!s}u*LeJC{*sP`Z8$! 
zEa_$jEeCQ^(O$Tm!Klk6k=DggKI5WhL-8%%WCKal>T)FbkIvNDqdEHQW-ht05vWI-C0l51h8$+jNbBqOulI*sae~bkG6ru%mgij5lzT3eIOU;FN zQ^C9JZ}R4Y_rTxeT@Kz3m>c~a9kzmZLQ}jYr0<}Xq%YMc&8=2}clMRa#_d0VF@sKa zGDoAo0RJ7t{~)zZ^kW*WA865-uU&~Y7PJmmpmjo82Wau2h5ETcG0g@pibVI2rd1pB z^A+Hm0ludE+zeUe;8_cvM*Si0YiKBBWuSTDjOjX`jDk<152W-0I^eDUA&*YfNn`%` zS9(a`>Gu~rT_7SKJTt&Em+}qF2?D-f$o-1?ajK`~;Qc@By=Rye#TG7Hy}Nsd$uoO~ zIN(IWATZ=4Fi6fw5)lakA|OFPC4&J)1OWvjC{MFkUr$4JJA84RG9z;Mj^z3-~3 zo;{$R`{R4=uiMYFcCS^nQmu+z9rjM}zL>~s#?WqL?UOvh#)`UM0snbe8=qf+U&cXp ztRCd;ijKuP{Q6?#Dda&oD*~rFidTnf3CJbtm`~*4-86oIYggD02>Z zxQ5R?o+n{mBF|UgY56~RG7zJI|H8vqHwDjB@Z1hO(Qn;-x+R}W*D$)W9!G-r2k;)| z*lx?%iQ|+W+TA1>Juy#r2}9r$V1_-MZ|0c{o>yYQ+ z8fOODY@nx`F0Xd)JL{JCwHN$9Pfv(nf?p>?#Zlxp#q;kI68$QVU$;vmp+8LZiM~8I z_256~WBhmxHxSocwG#DpqJN#2`CXQVQ6FbZX}c!Rp+?RI@QKGM=`KG06n3k|KQ6My*;4IbvCD&u-0 zkuEuoZfa-@ulmT-Ck#1q>|CzTyeGlq^X>X9^Ab=FF+5~G!TD#C&pC_-ml-^M{8L}r z9_6@*!ikl49_c!B3|S1z_MA6%I3US z7BuDQGqIP(!}^(F{NuF6gX=CH_Wq{j`^{Xc+c^Je=sHH*ksX3RKb3bz)Y0gv4d7(M4Fz0pV zyuqBen)6{r;X(!%>^SnwxzwD?%(_63t?T#(nONdr8mS-<%OLN=b?xM_pZrgcrS;`UHTq;%)pV20AF1C85t_1QiW zU{ZtAM0~%)SB?nJeh0}i7V>A4?fr(gZq%z*1NVqMe>@(NuN#dt&%+MPZ|K8S>s0Rv zrn!eaEGvPu7>}PZfHE$~lq%tA-c=Z!s#H<80Po)bBNP$UKignl|E?}JPLLGsf3v|d zR9!F9CQqsP(Q(Wv__%)!wR1)pecd3gnc$Ks*#VDThhnYmihobZM%BHLfFL!>6Rks_wQh!g8A@9>BhnhP{w!Okco$Fqt-#Pjgu#eYUq$&P=Co>mlp4ZcR=m*Cfl--8r4{vKLRDE<{Dtd#hl zm=>ez%;#{*$S!lPgr?l}C~0=@WU-Tb3+Bn}A(zIpd2sZN+=Q zStmXXkqE`xA^dK9EO0O04x0RU4Lmjwj<130NPHjCqwyQzUo3tAQ+8539ZfDdegr{J ziD$t5)cAjqk`^Bhm(t^R*2Epv_=hl?6@S2osdy$rnjN1G?{nha(3mU5_rZ~B@gJeN zdi*6w<;MSn4>jU1LbhhSRz9CgX!8qVFY(H}J$xoTUyg#Ms5ZAi%x{E3WNvfLhr0L( zFy_TCMD6Fr7vr}qJ{vkm#$$-!!X|Of&Q6ovTx~c_#zGNJ*K@5AYO)=_%4^abjafyT zwBS6S(WEm^c})iJRMun^PYa8u@T447JWr62D!x;eBc3Y0QBtC+_-;WmRPp12Oru*KMtsk zmXPgKmbNpjO8WOP-YRa1cTH6KuUX_!y{FEYrb14<7Kmu?Q5VcqIZirlWT;D>jP$Bb zjNeG?O_A!35g9dYmdbas_|=D~8dxR$Yrhmc1KOd2D>=#H#PeusCp*bBr_OAQ_EjhS zaC8v3#;-xq(vPr@^d2=`)psJ)k22nkB1swZM~4?7DgHBM!ZJ{13{kZt)W z#wt&>;rJ{3mB)rRfWOHmv_ZsVB)h=E_NsU=+`$o2ZQ}Ws^^P#_dB;b?yL2@$TKV}@ z-upg_A$wIKc2;|Y^sUP5UusyD%Fo2AqPitum7mk4Z^;-{emk9tqAeM{${$rDSB5H} z*uW~4pYYp*K8x`XMvCyRo=)LwM2|6 zAB)sPY6iw+q!gT&u?SSb68ub9jURtKi+M^D^nrqBSkndk=~BN62Dasuh5*m#(hKn` zTp>i2E<52-;Yyv;W)(sqVJw>K`~+?Bt56S4lSkRIoH*|Y=f&^BZ<#8bjEEOKT@=Mo zS3=gVJ>he=qAk&(NXcyrfua{PIl!yjV+d%`FA-)$+jh$XQN^kLDyY)7HQ`E8O(&UN zw`|M7t*D+Oqg$C8R;TC{e?7Im0X0RhS79_%?hf?pqW6V}=AML3QgkSb&k|(hjzIq} zIwm<)a+`v;=%@52k&~x-Z3aaqpYw$(pw~$cik5SlpgrM0&gp`#K&W#@2>L5xm@`Vyy5t=rXfF&2InxDw1T~ejfar+J z&@;koou^@`?Za>)e2bGrLbaU^i{V?H6ow;;`8f10_}iR`#v!#{oFD!ZeDUolvnu`o zqc0m;@p9xdipBLLrBoGY==Wz|dK{yDOH z<0OlQxj>}K9CgwG%uHDg@)skifFdXT22ZluNW#Q#(Hy#QhaOi`YT{jB?QW>QE_dD{ zV~=c`02Zgs%Lum=!>b?u9CGoJVpxvfiw5VYuJ7i-C9iQtKg2wj&A@9?c{{jN?j(fE zYZ`9}JSwAu*G!0vTn;5(b0MnaDwycgofM33ASWUDSjPuzMX zU#@#G3RHD1!s-;HvG*e0`#|BS>ZA2I1vxskicPIS%LO%bYKBcMMi88WIyyCKQy;{T z<`lHlsmi7*^gySeyH5Riud#Lq&Q8G)oqE)!-iEQnDN6oaJp0_HZey1yitE$^Hua5s zrq)xMfG*+D>n6Mv>B(Ug-+dSQ~AG1PEjYFy4j|l&9y*L zADz0vrk=;L6`M%jV^e!^gP3u0t#LZFn@w%Pvel9aBj%Qw)WT=!Z{v*J=u-u+<7dL> z`0?fEYhqNQ3OMc+d#wera{oZcyope5$EwEkX_$<&%UwW&?*#J1kMzD_MQ zsa>kEHYeonfQCqyn#CX{3K8vk1TMC8CS_E~z?o|_XR=A}IWr_DFSi5+oC}1gliQvFy->PzY3__X5OZWqZmJ5uWMCG$x1;zi=u}f! zm+|;7Rl>iPuDYBZ`MO-Dqto9E7971^z|U|b`TS7@^ttNL5IFCsGD|q?>?I$v`hu+g z723}s$11r=XVpI+!%ktyrru~$Z$SI`%&oz7PRv#G3CD=(Y{QkjFQ83UFivGJkJYZ) zw!$RmRPoFtGa?Nf&V`XI)Ja>Z+lwMnxvH&H`^<>0chzVZgo`j+u-!BPw=^JsGg5u7 zw^aU4jHI=?O8x-mvnGA6nsrmlpAN2CD z2dRU2#a#ObQ9o!18d#k=!ZCPs9@0Y&%?=uZqSoy#>G_|dgmotheu!E)?u%b4KM8hA z7D(3bfq{=9yV2@90(RY;6zA$ zXBkcNO+k}se3s})Md#6xC8i3-<0ro{N*;ejoU6lpshZP2zzou)LAZ60l3x{5f79

2RA|Jt&(lGAf_dDg(sd@J zC@6RuIkIiFr=sSw$DkFQoE!SsY8|2)^#mTjNNfBvU}x-AqbISbkMs?RlQ~#NY1bRI zz);gHuAA@4L2~{fB$w zqpRUXi|d_qHo=?8BJI3Uef0T=aeR|amR_ySn~Q#T@?Fkut5Emd{Nb488mtss-v?Pz zCkbsW6K`w1mD{RXs+)Nis%m@E>(k7{%hM&-zjI*wh)P+n`N&qnJw`O5Ga zHhG9TuL!P$KXIlqi3O7%mH!nQdZS;ZVXj~nTWDfFr|5&uIJz!W(~C@cenWZj9<7Jr^BZnz8D* z5@^dOTQEV~kOE8HbX$LSEO#KAf*~TjgHdvUR~s;f;>Hrz5r#>LDGr#2Fg167zyHF@zXEd%E8O-@FkCDNJ#(i&l~udLJhg6_?<75J&9 z(}n{b#&t}^4JTz;ab`|I=nRWrz0QGg+8qhZ|?Iu)R zbDg#k^dn50fHu-fr#%mPE7R(rO`M|B_JQ65xXO8V7hbzv4itwIV46nCsAy{BJrgR% zbv*nGIfF1)M-H20R>ESP9CO{3Ml|xh$!5*nZL_~+Ja|OHmurMI{gTbLmrp8^WwO~u zKC{{BR(X29$!7D4v@rx*AcumGGce{>vAmHU3E8DKJ2%Ka!(`KuzLGr%tFe9BI~5sg zvf2D6+w59_^vne5Wj6cgy;l0N1nCECcAG%@PLs`Uw@+t>rfdIyxz_4<$}q9pazK{i zZRl*WE4^c~YhW~!tUgA4$Ta~Qbbh6=HYx%|`@*QO_z+vq7G|byA^OQv9b$u!@PcY0sS>^?a%zH!JX7>56PZUQFvE>5hPjVr zUX{pPn!t<#jy244Ec3cV=A#{y9HzNnW|;4>%nu|o?@eHa<~t1Yo0j>}MCOGF%<%bn z!yL*t1$;7*d1L}JG#@m~4K4GYL}t~kg3rGj=E0iT&Io(^2V62EWN`(8gH3gg)%$I^ zUdX9RO%2mMmf=u21G-30t%AX7q9Pp)GiU8xA~49#eZLzq6&aMk^j$@!R69XdI?pgw zEs)^YB4m`?lOoJX(8bG6EMu&}+(6eghG}d?U1b9T-$mDL3A&b4)RiCT+H9B}si^Dd zak^ek(DhbDU9|&U9~!1VD(dPtu%cgIC+I3DtWd^=fi7>U7V0hx18uxE9H%R8*jPc6 zE9&}Xh1Q@dH8M=xmu5gY9Afp=%ovlgu`FS3LR;=;M0O{N1Wxp*P@qbq4Rffdg8p`i z`qvGpC^N^%G^;4nBT;5#`#=WsWn_sFVOfV(6g$g^sY(wRrYnR2r5ux}v8+PBiaed5 z@d2aJEbXQS8ecO^uWOAq2vd*id_FGghUuhYMk=V)b;s$7EYk&NNjnK^ z&$1(=pYJPSE<775l+9$jG#W?8>!tqrun|S(K38T&Ks}pM!zshJ@@vHv87V znfnv6XV~nuLH3q}>}zfIok8}#gzQIc_CrDTrwQ5nZ1&?p_OA)qM{TxU3aj+c)jHl` z#3@q65Da;4Z7F@swToMoo|nkmB!Rb^<+aOImEI7siJ9De_1BtxL~JrQZbG~Ryh4oJEj z8%0k<+A-}F0RLX5Hg3K@5$VCCRv|p~0I1Kq8=E85cw@?4d0=+^PjD6dOeBe>+?5;V z)aN~~XCtY+Bj&DLH@kk#45glroWz|scjeJpu`BLX>f4^Wn>Yc6qRHVr8S%T7!1q;P zy9V}F095o?DK?FIX_k=U6Bf=>5&zSZajwz`uYiH+c7T%%g72|^L*h>e@#9MM0hyG^ zDMof|)DPvlyg;kg4XIlrNw3m@sbE_I$;AK_U3d%|)V(#6qn6Qu+ai8nI&c%R_#yb~ z0nTy|JAph8$Ru04^+*|sy5|vlC_#-ID8!bE94U`Fv8GhH18jJ5OZ`ffTR_wYQ4C0V zJ3&GCL13|@+XnDI1W5TX zK_)qnF(N9dyOZL41D5B&`z!zwGrz;mmI+lTz{1wb2q*4b0hZn1-U)E81@Qq8-eV>8$g)PlttiyRzZ8%f=5o5V3_`xYQgLZKr5B*7+;1%gk~wPI^v z6ZJkeDtW&ru!za?60O)={3&msEmhp|fg#HF)To#7LER3`&X!1eYJHs9hUz1sEO$#uAm##@MxrqYzTN&1iP|6z0(l#tA|DxDyvcDr zw8(I9w4L?@s}6QIS@ZSlYSa2NO+^kH$&24q>R~M@qoLg(Y8*u;mj{aWAa|(*A9qT4 zw^}`{2beeL&l*-pQg>_#Jsc_KC{Yd7oB@!*inwPc8g620lV6 z9lM~9cF$2qsPxf@&;Q1hj(sP1_|Ci3;yxg+l3*?V4usF!W>TiSV_RLWvY6HlDRk`H z8{o`JsT_e2KzHnZfdhQ+pB=jn9bm^U0TVycBOQAHkluhyvPs9bZB;t9RpTm)VdRyA z9XpjO*|BFq18?F=$G#oN8h~_cD;7Rcf$i9@fcJR{vtu6v@+CkzwiS*A!YS4|I`*!H zkYUG;N8vv}I<}Qb4rGi-dB-jQZyvyO>|rKUA!RXyLXH&Mu^WNAAwW8I8IX1&uDsd* z-mzyH)rgS6u=gYfJN7-cNeqOx{s3tbqk)Vf!6q>c2tQP1I`)PI-YTQ$pF1|DEX>;8 z3|p+ko*l7Gh_^mJfdP&U+&se7hJ8XnrQd8srO!0rOJDHTX@apOnGGJkH(`rUmgfN0 zM#|VFk_*_*)<*h}$lwg=tdHdLDFfBv3Y6foh&=4UyTQ(yd8c3^o;Y?EQj5rcKNuDO zYU8gQkk*iCH)0T<0!UhbvEZ^uYo;y449r=lVz`evCvq*UjL+5ul=%z~oDT z(CaMD4UyPP*l?C3YXg|>0XX-9*bn425^F&G2IMDz46as#tG=a%d6C!z4EbWJYAX6M zKvl1F6t5vH(*8IYJistN5_5(ixdj;H;hvr#27x#Y;FN*52*`92O+fHlWlKra0l^PX z-2V zqMVDZURO1v-qlXhxeN`vX?O5+22|(#Yx#WD7kt5-dp7ci1F%HA)Ogq)GZ+qUprQE) z1lp@}ip&G2uaI1%^pClH4S~94HGL7(sKwyF9L|-I#>1MzFIL%n*qaNGDq9BRDiW-+ zH9&3xNK3IgLJ4|}HdJ3-YU40`KltwjsBWx2E2P%}uoa1gS$|>HTbT6~W<7;jKc;cr zdzsdXdtt2cWs$~zG{c#mJ_Ws-0pjVaKwc(6Pxk|P3lOEz|C(}#)EsJn%^lQ0yI+9k zFokIUHz2=|pdabz(5V3NN7_qGP`Q8Z6L~3JE>w&*6Ko`}r{vQ^V{eFDlwZ(!8x+>CmU`yjQUn*+}5U;xf=}LkXGz7?C zfOu_n;2JH^Yhr)~l22{)dOY~Y0kqea_5aZ8r@Nv_2Nh(oxCxJQb);*o73}N++f3NF z(E4UMy+GQCDCfm+%CpwDE5N@z2|$}gaP0?1{k zRpY7Kspf24LQoA?t0|cnlK`?>Z3CncAd_seT6J+<%R^SHR!unYOJ_n0R;%5plB-od zV|X4wR;%-Y%mv75)hdXcsKBmPmx1>x3Ujr(76_j|OzDgkV1-ixEU-Qh*RTV@@&I`6 
z1K{vdjpzEBaG=u}i7Bj~?IxyPfVm<8o1-Ju9ah!T(D5XoA*;;tM*__9$Btw;tiJ7F zeFfmM8s7o(HVIbe=Ro-AqsvPD2?*cZ;~`D|Kv26D3&xBUkuJGy;1QdCI19xAs2JPv z&ql}zu()Uh^U6qveH3Cl&VfK2Acm}r8^{wV$qHHZ725Ifng865 z#eph|v_VH%EGX%!NY^mN1Sx3~80YiX+Ut@)$YO!e>d2{Wtk9_tD5FsNXJ%r~83ZE7 z#7T(GiNx4MVtoRE2Q$%Q8eo%8&52~Yn_wYXR@!qS9k~=vo{1c1PNd&Dti|O9Cy2AS z=2ab@!}ie>eVuz4?Za8a?km^1QzFgCG6*jA1tjrRhbgEjq%8tH2Ow+f^YwM^X3&oU zr2pr3gKiv-;%94}X%YWxT&zp~KLDQn0O|i}sH7+$lWfxe8-r@VRBl6AH7>40A%Xrs znrhhpPXkX+fb{>_KxP0k$tL~3mAL?u{%_UzfkN$_>Hk#8{=WtqZlX$VLp=cGK7jOp zs~~!!0^9%jfZ3B2X8+#-$xx3PyoqqXVy@ z8Vr$>BNzp~gr>s)83im?M8nHj0~S~DhbeK0yAZ_ub}BR~z+ zteZMg{E@Tqlag~N0!mT_0JKh zb_C1TKDu!BHoI{Ug=-B|x?LZ=y)+7H&jKXzb)!DI!1F=#$yX_GKV9HkK;Hz2a*Vw# zC~!Kv-D#2R%1DuM^$GAi3h2WjvH-al7C%EkWs0B7`GSdhILsvacXPX}jI9wjFsGvo zLs(qMc(@0Kb^#EVHO8fXiOV$Xh@YzCVxLtguk^~Nnz)=A$>-2>0BYX{NDMmZ82keI zCxBG?r$G#wF$SF@DL*gbE%~{}spx*;X zfm`YV{{{LFfXq=TP$KG-#e(UkRix^D$dx)tLpw_0g4eTb|Kc^T+v{ns?Sm-=+6tR$ zM^h#BBJ!fyNzhsyARgA&9<~JC9N={_-u;WQ+p$YiQya67%#?@ds5MqIBJV+Uhtkdf z2~kZwMV$khZ+MFXdD?-?Kwkoo5X}ujWU(Mb`H?ESU_wH4E%^DUvhpn3zj%EydUITR zZJ!D%57AkjOo-x<+N{Jop>-WVJj~G^J`4J3fcMz{4$&aA?yxqd;4P2x5XCV$$j}jv z$gQv4P`Vu;Aqqzt(}6ERe+Cc-qS}EN7RwQUgecdjb(F<|5XB-nM^PRL5k5SkDgiJB z7=eEYQ7HoQ->}@ID>K~XAzEm?{%=^`O|J{B#Q^c}=dkRZp9;DR;0^rW#<(MOSR0#a zxXVM-yo(9Z;cz{MV*r%O1`|8l;jp~rHx+3U0OG(G+JUPy1^>+e2K^$yd*OeF z=m1K-TN^uIxI3^Z;c(V){t#okb%^#u=^FrH=f}8iHI*K<6N{d;(&knShA?;{;@lxXNPMC=A&Ns0$2r z0w6BGchtH6TnUxO=R@>A;nXz5$9WGvrB1$J5>rlMTT#`vWa5sWj+=eLvwVWujAfX_ z?O9z(r^D<3fW+uI9iyqBCj(@H>J-FiB1-9O4X3Pz3Ynl5fM*^clQ}X$EeEv>AQRNB zK-QAr1jRRQHj>~3#YfdYBEboY&!Q&-PhM+sOG5cWcoWNL0Ph;#(9m$DMItq})cd_pb z%Pss{K(7Lb19xc$c7c8wAe)>{f1^)jL=e@x!~Vt|ICGQp2zU+wWRo+Bj^_bnld}X! zEfU;xJrzhh65Mne+3Zjpl<|@{)17jU}n>Mv9ENhj1 zP}&8M#Fzb-g`3iWOF_>9hy%;D1N_v<0|04LTlyM(@&R*aMc9u(t+c5<;MoC?HpQ2h zzaSwS=eVVp43IWe8%QAuwyBOl+5@Cb{m1Bb)IwaG2W{$1<;JKH=Mr6!&SZ!t^KLs*H(D6jP9@Ii*;3&XNzOh$8b07>8GQA)No_#K|60IBquTJ0pz;{a0W4f?T^ zJoq(8XI9w%y${YDNS1)-a)4C&JwW&cEUEPEKwcujf#hQ#ACh3DvrGI6kV;*w|eosjj@ne>4)e)D}5dK zZvjZ9TTVZa(WPbx%|P-ncpn6y($6${D^_};NoA$yBlv$?={BGhD*f|3_%y*(`e=yi zN}mvxXDoE3pJyw*34Az9^V*kG$}7Dd1~aMjv-DK;5}2O>B=LhEXNBeC7r%i179f>A zLaVI<4+;QM>9?4IVAy7*j|}_AkRp}d89W^TQt6|Cj3B|Dz6i+0Bv|Qdf!s)fmHs4< z#{p95cR&*lM{P~4^nH1_Rt89=e+}dd60G#}e1roam0ll6T@tMH&OkZ3IW8rFYQ{=3+Q}5g_S~JW9z!Igf$9 z4>uL+_WKq|cq$SEXP>HI3; zKoYF<89=52q|)O?x1$auR{B!pFQx#8_j`ccNrIKW9mq>0*wa4;@+k>cx74>1~YX6)W8}sjT#51pjX<-3GKm zrLW0@PpwU*_lBsh^wwe7*3p%IimmiNlJPW|=C!ZTl~+2|N>8sJmh0V9;BaF=5F)q}lLUMEZ$S9H zIjQt`5xOWqD!m?%+5oBaOM*&QiR;9U$Zt;pR{97aXOdv0Ukqd(30C^8KyD_%O5X%z zBS0$M>UEXH5-a^h`p@Bg7x;Goq|z;?AIPwFc?6aIK6u{+pwjRE+e)9FQ0dA{+2);s z@^zvOXkw*zsE*jy(Dm4kVO}%!bQtC%K{fPM|M#%_1wbya`(ga&5mFY@Gz3oy$7aB? 
zTwvEO#!dl1-y;}lgdD~1H#&LY*bjqo<~@SeU}_4Gdjw;FoCU}vo7^L?4{6Cg0;|SV z7PBi>-Xr{oDtV7!H8d=zO5P*b0%S8l?h#nA@QDiSJ%Ufc`yqu7LpZe-R^R}sSt}e1 zgt0FnM+%;3yOq{ivkk#tA0RbrWs(CKBT`K*pXx3!QD0`T?dH;c|XBbtvlFc?oP&p=UB6<&6ZH z$O$s(hp^`O6f6tCJ5Ovkz1&#fcU~~w@MZ&+iv=7PE1U6TEzIcgq_S>TdOWFW#*@mk zQo$TC%-;vh5k2E1^LcP&%hf=wjAdlTzgql(f@JJ61i#0aF&}#$HU};|#>L&&zHD$g zt%sma z9L}1@pf-vB2XW9ZYJ{{Eh{;kw(zQ@{&~L;A~w}VYqw6(GT0?BOq}y9L*jFhs#;I1DL3mluIR4C#E+U)n@C$UC@J`=z<6E^$uLAK=}uLQ`f_LL6K zL!ci3=(iJm^LD~k^LB!7-cDFJN57rmo3|78&X#Aib8ywKj~yO--Rk=eyp?CPw?OAJ z0C`6HH6Xi5@EPr|fqX%N&uFv7x@=-Hf+q#~$}z3q+2W@+fbKA#3{a6*O_&(d0Z3+C zkAL1YGSg_|O}%BB36H9`(+W|K?#jKhPkico?`#!WXe9a6_j;p#I;QJ8O}ae6TFpiO zeMncEeQumxjEA#k=rDwS4D#g?vJ#4z1*koC=4hnJ8lw^D`T%+Abb~Kn%jyogGa&l@ zVO&dg)yl9DNeV&r5D?RzDbwxj)*Fs?dl_ML;~$2VF= z`$hbU+dhcA3`pXW1f%_~{1#Yd2`U>PO?R|EfZcsC=z)N=C!lPsFW*4B0`zFW)V>%3 z&+(m=Q?=!(4KUTO_EQHMy59RRojDr9d~;`}FVErT)&mWQuEUh&8_tkQ&VrXS{mRqN zLL+(@St`moEhS*6PKH^2=G|oAYZsYJk%{bds2c<@t#qQ1QK}r%4m#KSRo{oatTQG+ z;#^8a`+cj_1|#ICdNj1gulmGr7#fAFqOIST=eq%?uXU=Nhmz}%l{2_1;v)f%8(n^Y z8CI3^RN6rD@~I$IR1Lq+1$HeK@IITijJze_a$DeM4xGJ+4mV`z_6LF%Ll>7L=iv+; zK^1#uhEhiZMvDd0&k{d#+z6QX1Z>V*__-eHR|A|^LEHu8PCyE^{AIM1V{FkpoFqTH zT*KoK*l0Dp0OWZl&)}P49*$KjZ2R|7| zNdtJL#wI@$#o42jualP|qm9Y9{S)gf-DGc?6u5a7-Mq%nY>U~|`4U-0P+1cY{uzYx zA%1#+>H^3luj`M)btI_CfY?$rF{{fBuq2q_;SBdH-8~ZLyTEn>_^$(mZwBFAWE_|V zl``$YF+Nsf(?jYaIyS=hYtu0yt~cfAc{A0U%#vSdDt0fC1snXMWxP>2yg z4lbFqsFF+Ozo6k4s^pTnq&^A{kR`KK5IIqST{8CvZ!dr>ndbqy2p~&lE1VPvXIkr6 zGQTzoGVE}-f&W&3ESarLN+4rI%9qR=!21vY5*%I@pRYMW0TwnQ!IJqoaLX%!e7kEm zh@B#?ydO-k5|_;09Y$e=<`P+EIuE~+Hv)l^+Cno6t?b5j)>K7~7>?n&xb%Jsx`5EId^2?|wm18;FfY-1}<%GXMWuodie3qJ6;F)EE zSx#rNpSp@#W&Di&4w=@5#R)K5gU(bxdmEWkpi{*-*!BpREf#2==4by#=3un_7}b1$ zRUNwmg&%1G5el$8BkG*%$2N|ECBIfKLq5EFo$3lRUU4lmG~ZM9W1{?7vMMHHt0R|2_$ z1pU7a$gKcx)!+MnPJ;i<=zk5EUl;gqv8vYMd48-3hu2PE+W@@}(NGHzTY+pLQ37HQ zkX-=r-|FxJz1ddVNyh&V!TUai>HjxCz9K>Y!wum(;jzE>e^i41O`*2>=hLx89QeO9 z3woybso`{Z<%IB)17&f5^CK9GfD{0t`@buf^3-jVD)RlOQBt{;p2ugotk#I+<4Hex zTsxD@a$7xzkN4Rpw!N39=|Vn()v{f6ss?T1_->q^>Tu9Vme2bR^XqeEsGltz=J(^0 zP+#&6^M`RQn9&_Fn2_X}Pxajib1nQw=;4~u>1KJ^!Y`qcGfdXJPvYmp2Z8PkXf{R-@a22zcYwN; zDg2v?fqo3AdJaEN0_wdCr{<1F=8Q>GRS&)J`~(>XnfVDam!1p#FXKwLy)XZi@iQ`x z0-}4?VZRQA(Z`Q0Tn9g=7ucPH$g0^GD|SF8bKGx`(i&7#fcqPWkwAt3ym#kl1^iMf zM*En1xoI6pj!ZH7IUP)aW;-3YD{ok!=L7G4!@~7H>dafYVnG;eGW6D~47o2@0B&V; z^?UsRrB}e}B7ok}Kq6}-jXN5K;d*4QrfYFoX<;strH-CQ$`?HKB2eB1p%djj{2VI3 z8*=Ldr!Qp8!%1`Bee#e?Tk)GLOVw5Ql*5C-DJ@?oF|c z32;AW4v_Z&(U0CJw;fiK;^=O>oapokFn1JLnM}#4hK>r}@IGvO&maoV)I9RO?qqxf zUZ)6IRWL|q0m4;4WHLLa5wM(g_(`FiQv+FxX#>o+T#*~UWuVI)s{GPgzw%&E( z?We$2J!DOUoe==1CWx1SJVnEC5Siq4n*%FujxigM(*cCr2S3>@u=qxbdnSksmOhKUA_Ka3_;A?tN;?E_>o$GsIP--7x~%hrAqcYOod zLdrIBQs1I%CuG%XjcX!+(-OoGApHTEWOMr~8jn_2B>y)9F0L1vIK-!U@@sU7= zkf8Cafh-|G<97kModk_<1@a7lN95xRG;-HZ7>5uYUVHEhg#GcEJ{9>RAvcUJ9}JhF z85KFb>ZYww52vGq{HiOzn!n16<`qTrYBeo#Us^1FjoQiz&ylblFt$SSOX+PR?6kdl&rs0nT<1UjjJ{2yX%5-DEWM!UrlC zN%`O-J{*TmNdtEUO_DDlB#(1a_gSUk&WJrglokOgAVH;VfwTg68!9UI=c(zc0CgUn zuXF{W81o>oe2(!tz_6f@BYC)!+6hH>?f~0R=o~^L*Mk@jWE_dBK+FX)6X2~eng&U6 z28CBHUJxz}v0zbbrN}2IRdp83GKN$c3pN7Hosn7tCD&0aBees_izHa6qd<-Tyhn@) z9kD?%nwYN�n>8mTzuQJ{`t$XY1nN?jfyR;KaUWp=dF`3z{WBERF>-2q1N%tGG=N z8;hme*z|H@eWw{U4?*eyt7aRJr>z>VM+Jwmoh?4f60SOfn9CYA;f~HLj{}^J{&*PQ4G6}W1+eCv;rmHwZBFXv zc-aa%djgygh*?0U1H51A>heze5sO^r=w0Oq#dz?u{sqAGXra>U&(-PrlPmngvku5z z=J0m`|1$Ew*?8osPj&p@vV$9a`m&>ee{%U{M+3ia`DI4~|4d#Zs0LRSV8n(Huc)+8 zZNFZ*P;I|YxlnC?D1~ZpYIKh2d47lC>2cc;!~R{ zNHkTDXsSS)QVqCDsR5z>flv*+zm4cK(6k7_vC_c3g}S) zxeWW$lk3JUpdSU)YYf#tqs_>LhVs){HNPU`J3xKjmqi+H%ksk}av@`Q>UW0`Ky=$Z 
znBgXY4~MHd(NnOz4VBL8$Qlfu{(zK&Al6~hU#k1Y2#cLa-}!-3kFYqZR|yp?WtL`{hRB{*TsH>==T5|(FeZ4 z=waCKw_0R#@mI9y{LX%4c|D*Q;Oqdw+k8DFjqc)ZmJS@bZu&(oK=ym;#jU z45T9oN)G}u03c($weM@`YeE?^;*LT7*%aUye*uu`Bsj(|0&*z;N9>%V=zy4yc{l-v z4km{a_WWL0N62SORzc(%>fjSM>w(-sf=}E$31kxqK5?@H$g3py#Lc@v_%O8WUs^l( z#*(p^Z0zvK>=v}cP3AAae;6S9n3mHIWQ<7pCiBnW{VxFFZf5kh$NGQ+C6-&dh^4?76N>nUE#e;V?*d2>EvFaAoKVCM z!TUY{MI8N4MI09t(PBXnW1jz`E#lWu^Cduv_$QFxNwA1ny)Xp#p{8rCcz@!1>{bE6wzuADT^f*@jtY~B0diOjQ}a4 z<@5rX6N>l(c%KKLh%5i8h^vDlS}Z7Hvgbd|358!tdL3%^0i=kZ0{NH(i}*8;|B_%4 zlX_zq21pUB0;vp;B3kVsWwFE}-bFhsVjb|;0!R@prx(baP{bDCZ3aLQ@A;b|dLK;I zBK83newlqBzmf-mLz8q~#r--wK!fk=@q1$Dw=}|)G@lwZ&&=4Kp0T^Za3_FXR3S0g z8Z->!kU1LQ^)#9)nzA=q<<6z`CZyJI;&S}hLAWV*E*C?^B>=f|xe>^NfK0NJ7CBLYjekAx)}`=a2)6~&3Lx>f z!byQJoRov{PoZ_je*pMT14#U>OiCbQM9SlTHhA?jnG*jIlQl<3S?q-PpAU&C0Ez!X zAoBq@JbRlxG5(Rwra}i}r7&w@a9K6Fi7~`2^J{dvT!@Xn*<^5W{%Cmy`Zn?hs`O1$ zlpJzB*0)13Zuon9Qc?Ofv#vMG`L{i}idqA|t^vq${%ub#rVfGL4Upyh+n!ugjqZnq z9zbrneBjAP9oB>9>jw2}r(^fg%c_!r0JAL>nXql_Yq)MfO!$1A&Y1|j0BkW7{X&RrG(dkbrm4Y z65{YSpQBl_vUL;0q=bHT+l`oYO>x}-1B)Eaf4o=|6y4bfwtJ!NZh(q>Wdv4$Zz8g> z{*Af({tORJLFVsER^UofC-vv%NaW}QP4iFaF4c09{+mhv80j}ly3Asc@I=keKc?Gg z_%X_QFPdR4cr>=kb(#^s(RrUJb#%0*LyG^#?7Zjsw_k?yYGgfy5s2xgkkb`vI|4G9;a-fC^Ff^j=(H4s8mr@jTO)GJTm-IV$k+ys=K!s41mUiH zU_lP>Wpx9!W#v|v+>_dy7j!#bHKvf(dhKhj{lAxisK-vJjSB)ZLDCXX`LMx)V#>U!S z!{jK71=coY54_x1>jQngXy9%b7!G6@30k`l$P9pY@OW!?{*ASxfi;T-)}GCN-dei? z`WDdutz8dfH3?dK7|4SF&oS*RDBBZ%V=colIm%*zwWqU7uYgWQax3(0p#fUk3uHG5 zTKgQxA%K^EytNH~W34o>X0gE9GugLTYrjC>Pc%Slv4I!>0Aej4$VmXN!|~QO{*ASM zfi;T-*0yA)u*tJ*jiIlU259Y6AY~+2wxK|V0K8GhTYK_vtW6E9SuC*jRQ7CZZ6frI zrvX}<17sEnTDum=N`QCq@zx%!Xssc8zzu;liv`v;XG^*6gs$7EoR%H~@+b*f+6iPk zz`OT&OLteagw2I5W~lfT1>klChT|!xjkSG5XgzeEVG>>SF8lf~p1%`gjtre2!Pa{; z&Y|-UAin@)=(Iw<`U*lAI%$Qb~P{cE?NsN)Q?`jZ9t#>?DNm;iA1gLnYQU4ZIzt4~md*)-QNlqxRmK>o`BacRhS?RrQpO>pT;WFH2I zOX1T|WdL!hGLRfV`WPdqU4q585yDY{#WmD+YbfR%LvkquYEi?dAkG0Y43J3{_fMp} z3hFjMX$eZO4XIBoMNT~s=M6!P0a~>L!Rf>84CD@^tOk^h2Jtrd+me3_hjE(}8d1wFealqs=~oPljPfE2VUkQ@?>ePbY{0B=r0?70Rvlh`SUTc`do zbEheD9UVwD49?`7jtp(Mr->6jsYk;X8^ax;x&v*oTMq(q8VMSn4CFk3*DoQ$35J`~ za1!*_nW!yxGlttQL&4LLu>jKZ0IhOC&|itbS%%$F=@~FJGL$0`xE_qF0piHLK-QC> zBhLfb3h>T0l4b_75uhV6#&)C04+U5-hz$QrO z(A?EPmXe^kyMWvY@WSWmhVf4e2Qimym>gxXz})Gf+zHE}l(Xw5D14k2XzpboFOs0S zkAQpt@N$ngcihTB%;g8>EEbp>63TtYnmY=G-_QchsWb6(KM9(v0-_SYYka)9$ZVu87#q1qxLL8-P3n@b(*NQyAHu%%a*9lYtMYQ0>)v|UjR7_@TyeQ8gqB8(2+Nn7oD1*6jLYawAT8h_{WrPQ)K)M75@da zDg(jMOlHD$hTTys=+J-l@R)HHuH^vYOaYLZB!d5tDN4Z>4cQU zf>!ZW?oK!*vFrs^J*b?A>Jhk$$v z@UA)D+Hqr}Si3o}X0gE9x4D&8K_^?~Ptf-x4bWO-IGQp*tepg;2EcpZcx!)mP4#$S z&0>MIuX8W6)=HtT0S(Yv8IX1)ShgWR1_Qj8j<wQ*1ThAwg8^r;F0{#@l-TL=BviAVQzi)tiMS}kQ z1>_Gv`q&EoDfFgE<~ovFa$PX*m}|+%5|hrSye9_fShdF7nTVMj>9bAQWaGN>utJ!7 z&dcFLJ?=)xq)x{RFi4TFzhqoNSGRfc_!T=6Ps)A>Yp7dHVtps!}K)n}S3>7zEzkmeJ!52BA#dwmkFIc96cQU{o24XRg%K;ugyUyc+Y+VoXX%D9blf2JO zp1mh0Pa&4S{P?}ezHnzyg{s=!QM7*@R=b(om&$NBYq(J?VSUV#FLB-k53U2$+Zn>- zflU4Rg(tie#!w#f0nW#KLMul~5KWfjb#E^^?W+X5>A`86_a?!MW;enx=ysA{1NI;E z;*B8XZiA)d5779KQlBZR?=TNB@s?Mjmlv!UDYVoFR69T#zwCUKzDaTu=&J$pcFD&J z=ns$ebml5A>(QHVKDZ7GWbnKV$YhSZU82xoenpDBT~Z37Ccry%iO8!H-Y!9ayp@|5 zAe^xEEq)eiMptd4tz};9#+zviSyw{QB0!8cQY>FMz&tIeDtB;VLn*_XDYt?DR)E}0 z;Y)9i0OV$hRS-QvL2xtWdGKzfFmE-y0pv9R#J`3!WfyC6PJl(M{ZrAn{{YJ;;64a& zla69(2IQ!S%SL{|LM#3{i+Qc*Ph5jDH}Xrz;!zoZ>_+ti(gPs7QC7%R-$4kwQMr`i zZqyX8Oa#bo)Kx&P0C>Gk&`;Qnl8yYbzy%NWZg-&WfZ%P^#2u)OKpvqczGKhh?>F+J zkD#>c7YYZ~(KVsYxz#JqSc~(2ko5voYz4?2Fw5r!5?T&hYL}ZkVDE$XU4T^DH$c9k zFmDf8;c$Ri;h@s~0Pk-UW~HT{gV7g&Lt5ANg@UV)d<)uH?dAMLC)v94z?KV$v33^* 
zd=^Wr-Px334L1jWQ-IWPPaquux|r6IuPo+i)6zbrZE_!V0YHj*Es&J}DW(;{KV=w4 ztZ-1w+rhhz!Yt-vKpq9)kk<91k#bZQ*2R6EzxfuN*}7f?+cwH^uk#Zi9|5FwSs_;q zVSCu)&!!Ap*B@Z{g)+QNRCOG7fdJm5e`;N_*BKA`hs7}6$YEPrBPeMIke1c~NPB?U zc4-EA0hZXHL*|n=c2s$UnR&@ zk&y9#>7@zjEfdnaC8RG-NFQ0A?meB5&eamu6so~C%wXfaRA&zu^@F?P^&NZti12uQ zt6o1MJYL_UuQ5S66U}mnF?7z;Q&5dj)5a<1La)Zw3}~)=A+tt!*=Y{SAG;0eUnuwNj(Ajc7Xh=Up>7v$iuR^5}+A9tMApMo?)Q-1LOlI4Lteg$|}%TGmR5=sh9RS z9DEM+CZ^HRM%vGBL4UzC`rAaiTMaAIEC8nGraCCyK$QW~lF9)L>Uei*8{m0;B__&ufLzb^8**YrZ#gY{$U6{NKG6po&)kMz?5`f zAYrkfq#eD?PAmXRx*HNZDHR=p7XEV}^ao4Y#mgOc7tRxqbr4MN0Wz86UWAm$dB_5k zUIroytCUxv)V&f!9i$cka;^j6-hrP^pvnN=AI5@R)5}v9!J0n$5N0@q5sgPk6>z}{zo$CCvW-EjTpCOcDAH=#2`P`L}2eBRn@(@5C#Igd> zK-6Bb@IkEAw8$>99sI8Vq>EUYq(Fv#@(8|X_zrmA2EZ-0)>}-ihE!9!W&gYNb8s92 zWHMFS=}(}36ggZ&KZ1y*nkWpQE*$QoOq>Mo0gBV6F+)1TNTVmDI>C3RxmTst-8i!|aKAVo zAf17mB<%sx8LW_}Mnedl;Ru@*JHuIEIfF9n46}gX;#%2= z$(1V}p=Ugi9T{L+I%n*sPUO+m61m%O@!LbM1rDglMpH)4+(!aw75OP4J%kovX5&by zi5U5-C8Uo`NN-%8?pMT-16K?Y|j zr>35!+OyH>>*tzYr9TiHy#)cH{7oj4qiSWGqL7(LU&vLCBL&_d#hPl>X%EPbBgUm1KVD3n}us zGR^#pE6a!)^6|u9U3m$pRz_X~e80MxEVuV+XLx`kD^tR)FZJ;*8{d9O_=jS6St|uM zY~khYv?Z&QV{U^E>5#3~%Ej>Mrf&v*pdGbg%+Mz+Qr-mr)y`Dz9KGk(oyA}eQe!Sg zncsA$cG7IIfV=cQEMLg{rdxA8QXfS-e+VGUjkk4qc7c8s(3J0t^57@l&{pK(Of}{r zc(dJIX!6e1EO6unt9rYePt`}D;!~@7r&gUZ8TVZP={FehhoCzB&P3voL}E)Ku_KY- z%cnf3K#j>j37>P1nzSC0gFvhqa=$6=bGo<jXk|_}EwKaovD^_UMfNFzSt8#C+V9?^0d}qvrwSyOfW+@?FXepzmTD zzliy`E8nI395i1%5~nxmW>#qmK0*LU8-;zi%{r|i=t4lfG*~Wpz-;PmcC*=`OpgK; zZ0gw@uT@VSFf*^Bo=-#bET32>e2E)9h9{(opxt@W^-p^c=j5;O<`ZlV2RMI%Z!VCT zfbcO8&Zqdf1=LNEUgKiZ# z_7$=|1-OMEvZo?i0JjuGOCbCNliL==a3DiT^Z>CC$XpVqgSZLEb%1tbK-|_-N6CBG zAo`=6=2DT-!WJG_6&JAh2G$@As|>S{Cc zyt!55D~lEBT=@ggXQ+}(o0RhraDY5--U3KNfIM$*#iA!Fu*;mY!8?q?hmYZCC8xJmX=F&QsIgf##FO$nM$I7GzGFpO7+I*G&Kj7U4fCQI0d^w9pD8Mj) z%JKj4_8st17F*xX?CkSoH=9k{4M~8IgkA$8B0@s2ppb}SB>@{!E%ZAd$X3OY&%TFt}&c6+?%yLWqSm|wQ%wf z>;Z=xddlg+4&qkrZUk9*2zCx&X2QusuouCzoPkAOVo?pAHE`jR4>#EUL$JQK^%`yy zI%6~g8=d}G){&|gI*;pfan#F{>}nr2DA|K|+LB$V+p}bUM_!xZq-0m>J+Jn_$TY0# zFpehaN?px^ zrDW@DLQt~QGg|VBbMxCVeqCWoHra;_O1APYTe20p9ZPlx^12C5O147xWXaw}_*KTS zWGi%kmMo(Ig9XG%$r^%^9mtYhsNJa`D<$g=xGr!~vcd2SWPl|b0}tO93tzswlAYso z>(^Q8fi0=CeO=_X*p#fjbR9R{Yq?ykyOYbMpuYgF!z92-C-E#okHBHg;WTH7=@6Xh zSYk(9mysZH)!ndv+~qk(%ML{gBo@QTP6gu3PQ_5Uk8DA@Ah{D-!ihD#5T0}4@(3o@^uTm$>XQiv1X~+s#F}Q3 zB(3SAps=1KIZ^x)o{!+fn)(DXb|v6jQ@$fo04LUT5Ih6m#G3lZnE`UXpF6DSv-bdp z)^r-+C&7s|^>MNS9E*~&re_0o37oa2Qvy->J}A0;0&99Hpf83KYkCtrHwZdTt?y(_ z&3;-RV2cJUu%un*FxJA^I<|l!qXw)8g$LpCm?9eRDnc*Ag;!XtKm!h3Y51WA>Iw5>yI!DD(4<%G)KGVAuG|ghX8XwoM_v}@Vp0? zM=;Sg|E+oHyL~d+@qOrH%VK~J`3Lg<2~I5K$)^6>o`(HLIC<0k6caZP;VQT; z?}Tu^_-}SNeX4GrHk?K%7*6{s;rlEL#-gRh(Otd*p+ayYo9z>HjD(cfUj&T(9Uwc! 
zep-}<_i;-Oe~4v$eoZRC{RqaS37!mA$HR@G{SAd*v#>$4+sW{Tziz^%dBohey8|$O zI?JZ>e|s}l+5ne~Dp3M9QLIzsTw&u6CXmlwR|LjcAhG8Bo2-m5i1Neke5px|>ZZ0J zBY%K^IHYh(6AH|%mrWIW(ENEcx_VRRch(p*t0JiL2OqSypJfq z?q|w78sQV+OzqmNl=CGm|GwHialNhG>42F6C$-DhrIs?l+Fb+B4RB$7l<*n+S%W`D z&w1C9HMMI4JJ;?@Qw_F1Be^#V<1-_Hp}R=nvWKh$igg=G;6>#BG@R&nvF8AABLfM%O$oHn?&ycC1WpCacsP;3nea3+KnYw6 z&nmd^bryRU{hntNL;{!Cz|ImlDdfKqP9%_J zbYKU<+Zjg*WEmZ34O`R4{}z%p$N{@ZfOmaF0>6dIr~@02|9x;G zf!{*ir~{uO{66C-f!{)M_KVB^+#nDMI7R}45k3e`Byd0=fu)qdA0c=AMk|5gfZ=9< zNT30p$qY~e%iuW&F1*rW@1g?}ZGz|kul@0}iw=x5@wOFI3`Add1+VRF#HuJSVyi1c z{f^5;;9sy$2n=INZnO-6D?^DVh~4@*d{rz)`jAjk&wtp*91`jym;yW`bO?bX=aZL1-?yXVJ(X8){A>Sa_DSaiQW=+W1iZ1;pyo27o7rnk@rX zHE-pVoaCrkp`v$z=d>>wkI=$U^F>H4Idu~Kt?0*RZ@vLc5(D}mQEtJjZ6lZ0OZjP7PSQ=ne&KGqD82ZBn)7Q z6<*=ZMab_DW%8>Jfd5bf32d?mrDvf{pndLL<-m-DI+GQA_H75w*QnlP2DS{n^2}$U ziLAsm>R1xHe;#^Zxa{vTv6rk8l)w&zKY{C#gk07=iDB9_=V_JG9-(&gvAzkH{aOSI z)GAJ;?~QOzxX4|g%?CR9QBH4wtEW}Mc^tq0K+<7=sfAN7AlIC|A@e=@VpEs=VAQ}v@?tu#Fcx+qxj`Dw{3vg3Mc1H->5Q-gq)KIXV1g_ zj^v-=G|nc510QvfAN7nXUkDXe0|3?uggU_KgAwQpPj9$_Is^(v^J_A`^7nf& zMbC3?R~YAh{Rsy}c3y^ZYGn87QgaV`pGznQwfHOS$ z6vIv!rV-ihOJMTokw+)L2wop3%SjSwWyx3q^&(IQd^u%v0qO(gI&BGbVU~9;37A(L zOD*FVrXf+ipyDr5?0f~F5xkrZC%dz^!gD=b*tPRV*_|aAXCzJw8krj`E#(X*t=DnT z@@z!E0JP`f@))ZxM$A7D;`t|i9Ri=jvmH*F`aJ=OV@To}<(>&+Bu%}z5lt6Pn))LF z+^K}KO}#Uc+rvpy9|+I>4Di6&k?V#Y zfu%=P?he*674dN7G!!n6DWZs@5IP=C6mc#*r@=+p27EGJ;6n!Tp?*C~l57Vr0)8`` zxeszupb5S&>LWpuMT_4B@H<4nNzj|1)u)Hh@>TB7&!D6aAl#PsN=kxO;QPAxbZ&{y z1E`St0%SgellI>}K)Dzc{r3Mml7EKtZbFlnc3-?0)eM)jmqjq`zB{v>7SQm0rGB<| z816d(vjd!Dd3peTOIntFk=z?jOwLj89Kirh&KP(`!R0J%lC>3GV82eXz9K*Gcdh8A z19uvnWIi!~e<&^UGm+c~Cz)RZ&s7XC^Sj`=11@K_MKtAh3Xh8bVx{W*1hUf{y_s?C zN;OCZrSCKOB4uyCBoVmS1f0S@auLc@tvM#t8uBxknP!9)NvSga0@|FDGhST*N4U`X(nh(vil3B(Fdyr|S$1b=5mb&F5(hsdx4!DW|Jyjg))JHMP&A+;bpPgz$aogddR! zh9UfDzRW$Gj)#(!8_ckrN@GaeP;H1mj>(=nt6cN?kx4mA2KUZX>d5hT4gd%EtT8{R z8eV*7>Rg@$tP>t#6a2e2(*QU1DOBm<0|pqBapnhXy#>>6*gCu$ zM>)A?qKU)RNREf^&$x~Vw}l(dzcf@w3K#n$Tn#tncuTUUC3!vw7W1-_I;xqutW@-p zN$SqaN~s1AdZdrMQ`nFJ^ImS)+k(EnH;q$Ho<-qP8CT>>Zfmac*4 zD!A}s3*PkJQZ-RCjf0f`6s>oqM-)@0(6avXHfPw6uo_lvc_ul~dNQhX+ z8_1!Kek9a}0jKpuL=Suznz39U76i1GhYC3NcLp9&h<$NMWCs|lgXHxop(GPyhnql| z1P(TVt`a!V1o}u|00XCn;u7d<0>u*Ol_M$bBv4@jdq`kH22v(ioN*>#a7LPd!5Lu! 
z1}97c6D>{+QznKCjyxhWF=TLf!T~9Q<8(Y5EB>Lj=K31{p{CV*iI4*aPT?kS8+%I0 z4KwuvlH;QpF6|7_#PT2-1Uk^v=Z4A<*rYNElzRiRh&^k%gp!G4#yPR)l>CKf?l3fV z{GE*S93{0*>`mK=G`2h#3b^F)v9(q1XD6*7P!DgHKyb)hTLNN+mf9$dX^KLY*R+NNSUML52bQ5DYSVe zrJ8>Pky{8u5UVpQCRoB5J!X^>JJSYn&nS%44wh;DZwhOmgK91%7ydh_Ha?dfR1cbl zl9~KFo5&)x>)Z!&f#1aj#$oFQfgGlkoZ`fKSawID9D5iTr{hl;-^8|?Dj)MIYzSq& zy^P*gLSTDG@eAecdx~Ee**<`L6hT_shmeiZ$;|t6sfyBb(b8gHh(9@L;ut6PmGD>E zp9=APeD%mg3|p}&=|xpjp2r?BW-PodxGcvLw8&&8OH;(r1ajYP1Bf-5=^ zzEb*1S5QkfC&@G}#A z!(DjmpP4e@t}aqtQgwHks*@_ctH1E@1L4;jq0h#MDE6rMovyc{gNoe{S8IJjH?$Ch z(+RZJ0CoJ8Rk^|x^F0DWQ&+U;%vzIHd0~pdiw%PvRT3y?6;VvO!`}_cS)GV-WF&Tu z%YURgZuE(#I#L`pA>vy#5P$np*e)kF%66-TUjrh}T1Z@_(D?*2ZAvBn*6^?N|H z;rI8<2K_$}{tWKG=bx$0`uIw~%6|ns54dOHWU2BUc-~@QBXE9$ z=O;L)j1=0`8Ciy$LrCFR^KUYVb5U$Kj|-iG)efO@IGgbu^#)BjBZ)RDnuP*M#?=7g zJ7JRX5%3IQfEnKc&kb;f^-nB?kkdd4|tmj)b z(9RN4I5}$mBmwCs!U^l3=n&d z^Km>54$}?R$jFpvtq-Fi`Q#rSorVHPF>8S^7*5EIfTx}TlA8_B3^=F9uTYa0!9ufl zP*8<}u0Ztpa5@Wt`{22QfkFhnh35+fS|f1$1t14k&<%kD`rxaZ6*U)``1$NDfbN5Y zj?2*D!07`JI2xXz3>=EU_3&H`S1=p_ku0AB<3~CFj3R1Ed;q9OVak`ojKN7^_JpT9 z11!uGcqYLa3R40K-;=`hc%x6@48Y7I5~X_~Jm)h&3OOra1mO&YD*_5`CZcA=AMz=5 z0!#-uq0kSWz2S`hNJF{HVrr*|m~%|6z5$>u1Z2Ke+oZ4=BUn_Bb2<`yLA2Z32=ITC zJ^}bgaFWr#;Q4_8vRbqfbp>ZwJtCmcj}#V1$N3aG0Hz(BVHKXe;7sO{_Y{k%okNIu zR`ePl^F{z(?_;im=ia867X+9iiMcfTwvTzlh1gVplRU@4GZN0^*_~Q@Pk`zBB2s=( zIo~wQV=uxH6}WU!^(fwswur3SybHByj=&T^oC2rYB5)x*=QGe9f%Wh_$Ut8NUVw+M zYZVMcKnh)WjLASR7xCA@CaA|@cI&?ew zQo!lM;3-C80o^eOh%iq$&KD;4O!K$KKl6!o2V6HeVPju-su*BFX2LTa&KN;Hlf+Nh z$&7)*Hq%_rMEW8)VMVYmM(9Ff*_^Mo#RxggiFa9a3^YSTcoU%b2Ds$>H+b%2fH}Vg z&ns|-_a_4pb|!_N+^c*F9{}ckB2k1t!t)JWnh4(wL|8@4U)?u-%tJ3lKL#h5qu@Cf zE)6q$tdsz)eF!oCa1-wVm=(VjfbaU4JK*^;z?A=~Yi=>Ma{@6#%019Wz2q`H83vbD zZwD-dG3{YdW|=&lZd*~0XCYw?fNy{+ScyQvYJ5p$?j0~UnOWQ%f56Y-1pq$}C&l_F zJnt|-k@mhE8)R^X!W>H>)Tgi=FrN^KMfw$< zf5D~cdGdHu1ns;?%;%%O_?W#`VT%n;Fb{-hKe#l^UIFGmiTPr5FOIp%^*jKc?qe>4 z=iC5ORA_jB>H7i|dc{-(|0?q-B9UvWLdfG}OduRv)R6N%vJHP5eT>;QBIO+*Y=KiV z5y-p}iFCJhG6g)a85Wax&*fAMVa%tD^PYM2CJ=IpvjF~^tBycy1yH-fsX++r3r{~d zc|oJm!UKlzZ=2`oPeSr2!iO+IyAGZ!;lfwhywmRiGq7FC9-Olk z$$aX~zMR%O9XDhE=(Pd{G~ebIz}Mv8=A5qgV+a}EoYcH}D+!zTAca=wLJ;4V3w2Jz z{;eG6@vTS0i_cW^gXtc{DAT>iW(2u=EqirmdJ|17bxJHaP@+_q>>rzcNIDv zxar@ufYB3mnTg;uxO}2-u|$=VM`5mZ7k>niU4WGH0dg*!T7thQ);ujGU zPB$Z>Eh+sRQ1X2k;>ahwH2%I%^<}B(AtLBiM(Cp=mXNM(dD%WUqq9!~zhthc9=|BLnZa`Jx{Z^_u+ zPQFs&ujWiTNsJS73^FjrS}!n85^sBfs7pvjV*zoDcwv*c{2+;Eq|g_o(3e;*GCaq6 z`#AaMiPsRFoCvaQuwG<4(|Un7TfE4M14PMcb&8K`4IkAURY}@SDROI4_*o~;mAtPI$|qy*$%OYHAa33L+z4?u+3XR5;WFvKxFw1_|(34D%%A<#nt zYs(Q>qKe5uoZI~fh_qshOv=8bh6BvmOU>p@ZUdQFYIcZnFCoxU!%WGHkyD-Qb5#}( zo9EudEYDT)@&yWp!1*fYH~h{R0%MT+A+W+MXk&f50exon-TrP=Yf>tF zkNh5|6#3Qn27Df;WDnXYtfi=qwvv25NIzP&6|r-%J-g22?G#>0whpz#)E9oC4>g5$ z3ZKkE`azahMUZ}wYLg;1*eAA$0kuQBQwD8m0 zQOTgng5NPnPx@jiDMNbP;tvYa<2JqHT8jBMQ)N?;+)9-Dx0a;WO6vT#E^O&e;YPIC zc#+MCf66PWkC-H3;}cGr$mCWgFme_H9T;#5-v=g-4Bvo&-bT#4LWya~nE14*qnKR5 zlhW~t^!7C^PisJp3Mqzk^RJLPi{WyHQstW8MSdu|P3c###NVxLQjG=T`Wkks1W zz<9H%^z*6AqG=5X2_klwnoVtCgs>mjDr{7Wra|*9)c8n-+Zy5S2P0cprHqZ^8Q3e%tUH9|0q58aFboLDqkwn=4kgO_UZlqh z>{87LHxn@l==Vlu&N8(Im|1r!rTKtad-_dO2C1tp8>p!6nhF_s{pX4FUb>ORkB)1 zYGshx$|#J}^8(^#NGe|#G~8q;>Cv5zbD;rQTNSg>eCb14Le^IEZd2_)(Q>oyW8&38 z+I@m4J$%+iqlvXeppv|l0(r=XOI~W!x#MD_RefcY^bMxYK(KHRR%=HWwH*D}w1e-M zHVZ+cAJbYQ@8}!CcgOQk!stc=*Y{fkw^6DQ1EA;=MFq$i1J&r0#y8W6J|#Y<L1@`Q=n@hDK{kF;td0Vx%o`J)kb zZ5_r)nddN#7f?*vIYtR){)#5J4{bNMDq7{BTHJlay0?11wfI?=t+vCiAf6`LnRx;R zBh88OL4eX+-9#`iA7IVZJQUoZG&d-{`SRDD1Bs2b3iE@>FPC%V-|h3SM$ZK&e{A0c zo=yzx2c*96^nsHSXi;shCw#37MFBeJt1b<~;cQy9m;4qu! 
zY^*~kdND;ZKVau^vcr1}@)%Qa0(=?S7K&H5 z4@A1GzujoF=X|z@Gq}*gOw1#>@OPG*Es0>355V*n&wBG7V41~w$S?sZ-1sB-xC2Q~ zUX5E=aOxohV%K2N3V|q*mRO{e&x*!T8D92*BtV=R;0%HjYW468gFB3WwdE7=(e$&3 z9FNqoaQnQD0M@+pHvG;)#0z+6W- zE`IoM`0sGSX%+n#rIPiS4S-n>m(LZCmX@A!ZYJDx_j!VG4d!`(JPU{a^|COs2IC{h z8cc+1Bb{w>$a#Py!YW!qmOe$&KY;%xT+U((!$Y=L+a~ksZ};ru2eELd;lyeKf%kWdk0n-waHb@+@lP{ zT=RqG!j)rL6TJ`&oI{SdTFM7-H^B9{0s)22@>YQ9RS5io*q`9qQhujca-qOS&EzLM zKfa8VSsAe#P=0q_yP*P z@DBp)KvJOaMd(K(bR=AOp3Q$uM2U%{Jmnvf!A1C3n`jnG%^`PLG7axzQzoMyVkMbs zu%;Nsn(OLqAvydH`0xBia00R$1t$yoH^Or{1AG&H8$55o$w9Z1jyC1T^*7*$?}#4y z8^Czbt>ukqt#ESCtsgvl!G-w`lKgPZoty-g9(%Bm_&GK?Y|mi(74i_$ z*Aamg@*F(RFu>OE7Cdjjg)g&=n@h4siiG{Eva)UF1B9GoShBmKGGh4_xL=SMx6O)g z!rC2N`0`FhNa@>V@~_TvQPb%?Xqw~rZ$7XO`DN!X9o^ zQ~dnH&Uj+Cb=~cM;P+D`-2s@};L4nDQ*apq7u}w#@({Zb$q&Kh5m~oD%w~k1gVXI0 zcn6-h80e0`_walHhfl9Hw$wah3a#bV3#^=r_ht5IkMtIEr(u|EVM5L?iQ*>BrQeG9Fpq`5V#(mYvE+D z`?VzqhFCNVcJ~948SFB~40eCFP#o;;iF9Ng)jJJ#Gft3P9Xr_FW>MIIq~zLUuv>Nn zdIF1;V!C+7K%)NvKyukvM`qE zwl??|Oml`X)Pkj$ew|EnhVUMEe49*jlJGBhe(;%|Y?1J`m}UB2A1UKj%+cWTm|~cp zXQ9db4S@SIi7&A*mif^(_!rD~ISlU>Sg;iH=V~xtg`^hXp%6~jAkYh*o^YZjcUmO8 z-ENsb2}njw7-N{;XrajTaHRhk(@$F%%k=d&_!ms`Zs1E648uwJs8yGQ!89+a4grt* z!sYV3NF3ibyU-e($N`9CSAswbILWRn zJe}ck#x=>#nKk`tK#{~MPJmP+f(HPuFA*<5;0$4PrqgXWa0#bbg2C_% zgxiND(aZ0gFHh+EIfa}P;-J$Ic2D<^6Q9sO5x~d6$rJi#z;imB3~ziS{P|2A#d*=~ zMjQ@rE(gfPa5B8PAD+A6!d14sO@=qz7?3f*&p{P0j|12~q5lF9pNEq%z`O9g0~fx# zlOvy9#sIR-ASJ3f(SRstO&sWp!)_I2#}a)75}(0IiQ>1T$A^;=`AFK?03?*?55Ghm z0MZ6dO0+*b)o|gG-IYlGglK%gg6}IbJO!m20V4IJ$`Q^f@SFsP*+CDB`3%-^P6FF7 z17=tn)k9A=H1Gx*YtPBpWMSE%EwHfDz+2ezW;;3zCh?Jf@4Y(U%l9P>O^i;$(JZzL z4rOP9msw=)CIrrb=PU-;n6HKBYPj&dmQw1}3VQyizQ*b?-)ZZK;=F3BHk9B<+Aj_o zr)5#rB37t(GElv6r)96s8aHO*n5iQhCQYpv<&5Rp+|h7jxvsYZ?lgC`?y(2Lg$To8 z`swVE2o?TqKLkR9olp_~UQqW+MTQ5dxoST0cU@;Nemo+8H50Aijg+W85{Ih?_H&Oa ztc%sV!(xn$2C;c6KO+^N>0ZC zCp=<=8-peq%6kNbhTKwx?rAf0_bFGxVY%)l(%4f9hZfR{wcV(Ssq%Y?=mTC%E9qT< zOwwx+8_I(dEKMI&U7$X!x_zV_p$L5kJ7=f_n^wOtbf|wURvsRlFf~HV%Agj@6N3}g z#g)ZaWwDAN^-gO2Gog-o5Pfw_)Nw#LgxMVl`!*5KU`W7Mu>G0{lPbP{AeY!acm<|jWMGGOlgAyoDES4(DAl8g1PPw8jK*z~8X(d{d-de2L zsST81r_2s6kUAnyLqxRYImSXz8iz0+v|^JBVGd1+p*-H9I30Y`XG~p^8%x#!?oQ#S^n3BoQk%PWG18J|5;JXcp*} zg==z2U1tINltVBlv$C1lDMn}W36O-6!Qh;_6OVyNC!~}uS|1x$p6fUheZBA_Cb1}( z_-E3{nYPSEG$n>7oQ%Rx^^1obp*WzPUczY=fpxSN11kvzBTOEFkq$oHu+OAa|29K1 zmwM3HGV>dlOZ=vGXcnzYN_9``jV>7@iagaWI}y{RElQ6n%u~7O%H~+B8o{`d_M3)$ zZmKa-lJj8UQ8V+om~e*LODbc55<`bUTBEWW4g*8zh01D|4KG5NO&f;-M1urqv9h%% zRtZULI%ldTRsjQ%CB%)Sx1Y0ASi$T;Bq>vIwo31KKn$ZLl~&bgJn52Ms!E`1GSFbO z8B*Zbh7vmn(|w-tp={@4Sd$tnP=XhLaH{#H1iZ|TM}ayNm&HtPupA|VT0`|#sFYqq zj5Ee#Aml{ACx%W+{gNH$4+4&fD9z4-eD1GfbZOTkVj0P#i2;c$!LE{A&t zZVOyUm4zvRr(H+u+Ncg)grweS6_6qmsTM_=+4&-za=}r_eVO zU(p5hox`sS@s)WkzGAoGD|$cU9-;49e!Yq>=Use-KIYfg_zM4vUmo-|T!62vR`_z; zn;KYksGuNeOr$s2($cPxF+fq*xWz6tn>k~Mcad>QlbD`jZ(<(zBJDJ+#toBfjWDU`{0>z?HrmBz4kLKmg?z=O~H2K5AmY zmwpW+Z=H0@J$>tZ z2>3RjQtciWwM)|;>c;l~rfxK%p6;97$a?x8=~L^Wjlg@!K}DlX|Bv-Z>DlB@^hk=d z7RqY$Xo%Fqd1!g42Uei#T?6$>x`{#XE)pe+d-`*TtsJSvHAt*-Tf!UnYVn<=-Vl^tR27};OS8{m34?uxJ8mp ztA=f?jO#(6t!Vq8N+|e_xfS{e8VhK-+uqZI6q-OE2}9=}qI&H{>Dz-U+DK;QU#dO` zKl4|Ntph;bUjlf#02v{(qDqJ?awv=AO>(342tjhe^)&EcP0_9c> z4WOsbKz^O|sDC5U<^N8kTTn7^U&L12%G2AOP0P>(&^`G7)Tf_C7`aIIknr>-z0L&D zY`q-U9pb7hT(=rVN_j{=dtM_2S?{W0Zq{@!)Fxkfb*@6n-E$*jJr$`0sSgt_&9qzZ zCi@{+<<%L%H^)z?4htu+6p=CEWun{{@6PlQh@tu7c=5T#Qv}#z+lRQ{316Adb~PgpOA`jM8;Osc>06WBKfYHKt!IK_iY0h zLt|@(+#<+rvtn*;mNB7O4Ul4_9PyI^CN{a*!a+7$_cYf%hRs)EZkwmP*8!Uufhw*I z)gTuj!|X%I?ldpdiC)kd?&kGnrU{CWm2$$qw$J_{ZYzbI9r_65j2x=aZlj>PLVDI_ 
z34D^KXHxo~afP%ped*LP*o`{KN9j*MMNg?P^=TXXx$Pyj9jvb|V#rO%JAmhg_L=p@n7q`2qeq3J+>CP(U;fK_JH!@9XZbQ*X-JY@x-k?h+5?TZ9^+h{taN*Ta0tU&rBB*o0=lq_ znN_kZ3+J;1MI#A>`k{c{hcN1P01Sy7A8CXk-2s)Nfa+}u{K8Xk_hF;B0bz6`}kx}~0~a(i`iV}0Dx0dB8aH@n7-?d@i_ zt&*yN*-j=4$#Vn8)|hrw=xB|b-Pi4Sn45Ko+d|K*bBp$L+YNN1{oQ?!c1sU+qx-m} z)vnqXGqqWDZtrThJIcK;B&uJI+8*1lMh<2?ec$v3ic~ZZn@=cD1;3Wj;(PkJOvRCb zzJGe7C!Gc8MA%&+vDR(NNxba{KgQ@)Izo0Sp1wGP{bRP?J`3Zb|8ckIwq@RWWd>+% zGXdz0)XIHsq|9V%t2Zz-P>YGoJDi0IeF35Z8%ht9-zW;JYmFEGN-VbEIvIeXJIInr zO-Mh8K0Fh{u_&5?DfTTfwqm6hNeg#{%8{hf%#3`|+Dwn?=_}BynK@QH#=7VtAT@OL z(8wC-IXdeBgKTHLViwx(4(d}vSiSl5-Laa94og@)#X?B=PdL4#RS^5QC z$(foTNeo6(5&XDyC{GoJKZe?~DQW-`7Vg<;;HA9!NJT|mI8532hj8@|SQfBDtjKzF z*gNLcVR(X-2Yo&&C)?|anIY==D=8xBdUawQIPi;emLI6FJ`J;=35^`hUIf&*{-@xi zEBG=dUa8b_P65z7ia0g7U#tqIJQh;F2fVm9Iz^e2v}ZU3!-=Dsnlcjk$2?Akm3}Ex zOe9R=w1`*?H9X=7PUmn)67-jub#5H(T%&cEnCV;G(nH+%Bv!qr zH&5rVlbS$2^lB_bxZX@QT7CX>WRe8;NkexPKuYzF%*r66^QSj3L+0k{pQI7z>!s6K zo2AHkEdbdIhd~O>-WJB=uXPL??2Eu8Ci5xaurl)FQq_Ls`Cj9(4 zz99(5A%hK=v&%FQh>wI7;$+yPADIJVXC|$ZAYDtyQ!?16tqD_;!{j%NDHFN~=+j_V zbr-;JDf^x`0B_pdI=&>n3%mC0>Xk31fBEqwaarqy$*hs~0 zS}P`rD816xD6_z_Yja%yMmRN94~cA`^nqMlHN65RwiM;!n!ub!BX{MNlO}LD>y!d! zQ`s;*mAqi1+~Oh)tuk}}V<~^>0K!{*>E7u}_njCCHAmeHM(@WSvAIb5btvNlruCUV z`8M{+Ybmp*V8i2PVk)NWvnj~X%oIRLUxxf;jZu$_NK4*Cq!NYZ0;!t@FlBWLBd%8i zL)r;4eHy@bn5ICoT%17JzsP21W%PRG6!beru}vNRD2aLcoGA?rjL|=2H&C^a#~e{{ zpyumkQ`iEoO32~>^04gyS);E8y~t-lBUm>U<(q=Zr{5@r6Vo8QEs$ntt;zZ2>`Kv| z3wGNz{iXIOWAD);|1VAbr!jVK7~6=vF&xt+bz8j2O$?M3_?DEwjY+x+C5Mrcz;rQJ z-kWK1cL#7PZ=}B&piqlqw_NPS9R709caK}{88O%CBs5rOEvpmTKg2fO(b94w-C;b9 z?W9SUo^MyhF)DKgE}22MX;*}#%+wi}uPgH9Ebx>lS*I^<%uMdfP~c%W~;u=;AoW z`HSV!P!}$JKq7CU^2?#R&9=DZtPQL?!@@NqZEq;zRbs3e>w9{Jj1{~Fc0QdW=-3lR z{#0AP;ad5FsJetUa(06VeA!IHk^a9kjJ|5G-i^QmdH>lc#?v21R5%9;^|+t=iR2=Q zQ4Y!|wp&*6^yUc7!*tG%>;lR%d*4BS?pV>IK%6tM2Y218>ic2$eaO^K0sRr3a9}rCu4C@m*N}dSigyF1`G~YS5zPw5?nL` zCIoio&mcTK#!d!z!`@y0b@m?HiM=a!VJ`)N<>0?&@7e!4d)&H7m)^a*u$O}Px6*t6 zzs}x@o!EPB7xq#R|5kcGH)ZeO|6KQeLPJQG-nJPS*fr6;6ojY0b^g3|Bg1UtSlE=o z|3c&VdMEjPF&#Fm34WY8u=HE*D-^U4t1wem&dHW6yVG zNU3+sE~~46vBaun5K|cpFee{wd#Qh+@gf7SRx(>;MuIZJP0#En5K7DJ%sDnQU^9Dt z)*L^xR7@(ftNqL{w@u6Jr`?onQPYzB;+G8AlG$%T$x<vp|nsttZ-qWDXY}> za$oyAV@Eele&?jg@7px_otG{@B>ZRcTiR59{-6KqKbq9#Z)uapGtwrFXQcCjg#V0} z-y4#)eU}KLWh)oJ&l(e(|CC+eFBSw|BRP6EH7Mq;{?`T2E_YoF{xT;6$-ppVrhFM?5HC-9^Rk4X@Re&nv0i9Tam^%D}A%e>7V`F>q z*T#-@6Lsz&L}-jF1+LPSbYM%6^QYk$?++K_&8eu;?drI79P?UjxgiIZf7y2+ItgF zqF4(|%CQvIM{^trn8n%Y5F}u|Sqd-nbXg^UiR2N6_hjX3moYi!N>LGuUWp|#dt5?) zF;AwvT!jJa7eaPPInT12-F}eAAhHg#d_ZF%cC3_u+oEc$v6TL+nwSlN|K`&%Pj{PF z+2w1@tjuh^UPjBSv182HgEph(a2~vGq536m2!FW(qH4gLt6!SSA$G1_9Kr?(B0T*I z4s|PaFse-OsbO3aE5ST)^;)b76>{EhXbSx-5)1+o^cangKFE?EIAr7C{YgjI=@Qy3rHn& zHIfD+tEzSk(BI}$_PKhKQ{(0=cXI~^DGwMuVGV%! 
zO+e;+6q`P=sfe(8c8{-T0C|@5`@1<<%^Kw9AL8Z=3TWLcxkUA5Cso7ur1TKT&pXOg z>z%uBPX+-IhEsmdX0i zx6GIAC#!4Rv!FN=RklT86EHCixlpZ?zQHeXGf!^?D0TwSX8XJGscs_G$Zkv(4$YsY zjt6w&-g=>Ntxp4KZzHW`3h}!6tks16N1CLrn=hS+G!m@;`;xjUjmo>JgK_%WCV7)b z=(e@{U9gEE4V;xlYnG#jEZZbGUN&DvxDcs3n(|{&UYJFEnozJFn=|r zVAc0)pWL}k6(YG0!s}t}-Gsn6yQB7FkL_uJNrLG?K zZ6XwFW17`{%ve+Dbv*qrY>qJ)I$q=ULOy*O+#ES8CsgkAsX(GUYs8A^nqJzw#9-&> z7t;(j;?nB;MO=FkS~zcH`T@{-DUG8$=aHEr{R*sE>@+uLU{K4q3vqxm$*^hkNHw7+bXoCrqjAARWRVa4-mtm5$Y9b=>H^oiiuSs)R^9u#>A?W zP(f^)&%~u^R6a}->J?4${uuOm=oiX)-Tc+oA{ryZ<6Wtq6LlHaOI3s0vf7PzaTBTT zYKI60CzqicY5n{+O-2jnu^h;UqlNyo%{h~iCnhPh-(UKyf{P0Pm}a!UvTD5&#G8&I zzD&s;Xv_U#f7XO+zbOL(18eW8@@eZO=7UZoUv|H7W`3vuZEWfa4z$8K@X#cuvV+rU5q zBkE<_SR;?pvLSVsk=#0;B!C`A{w=G6rn+vP%nGRe3H=n(;s>Qy);f`ATtA&wo(Jcx zrJSrKdJ>W})pCCm(fEpvES~{cSb-YpnvU}gm(iNPH$f74-kc`SH>^C@rse-y6M1s< zW#w5^<;Jdr9BV}iFz_*r7Mr87?d=QqWnU5y?pk2((m}mq73g+V7s%)p(_-q#9nN?A zs`Ek`tvmjV){BoyUc&%CvOksPnMo!-?pINp`x=h$Hmx=QT6K0tB?{qNU zfQ339%*m{*ar5-_GLG?s<8J<=DEdn&B1*yH?Q)*0GfoA`d9IIX-NB1F&~@>A|3uez zCv~LjemT;$J5n7nh!;{%IO%7@9t=o5og(#`5$vC`RAK4#N%8Z#@yc97jxim=vt_$u zq_@vV*K|G(^Gnn{RiJAcEFHJVu#&&tg&1*?1;^P7*=+vuI6JQdL@?n}x}5gtG~|0c zo5TsrFcaGCf&XRLAje7C^t0-Q5KaY^ff$aO{W*=aQ}S|howN9L8FHQrbFAs1Zq$v0 zTMVwEwQl}kH&gm&qo*RTH&LMGpq|>rj(baGXATS}^_%7lLQ*c&;M$7dq%AH;Z;hRP zPj|xdh@>tA+Bq0W1ZysLKy*Av;Oa{vIGzM6`teL|*)B3G+AFblVs290Iox!YSpKQy zmbe*Aq%nJf@U-c_#gm&j+Ghh5c3lKh16c_=X{=P%2Fb^hZFq?PF`T=Qky@Qsuj3U9 zQ7XG$*66lill7M4JY-~?=Lh?_d9^Oj9BXfwfiOq@Zl1Lc<MPvt4C4?Q^U~+ejM(W1jn8+Gq3eB!HJ!2WBmsp(>PAdT zpU28b%5v0lSxn;=u21K4u339CxJ~*EI>(4t;om~S-D_0U$;7p*MhoSx+gauL8P|xxrYK&m z*|h#dHt|h)cUpc!xpRy<=_4uZtJp7cS42!NMl|M}rYJQe!Xq;%4x}*CES4BGohj|f z#n6O9378x%7vp!aXoQf$;xQI<*Xfy&u_80Mn^NNrw6|0SW8^lTvU++Zx_mh_uOCDe zW_kLnnKGdFIIfD6b-tSm7~&PH4c!nZ%xnElgD4ZF6Aox$e9gIJjtu06R7w_?0g0G3sy)=72C3(d7xb z+*rtKlsJtgvL}^#sNs{e6ZUE&=-w-vP39B`8W};^M~#D?@OB?^;f%ZGA#S$=pw5^7Ty4_HR!jTcPOJr39Qn!7!?a8=sf?Sa$tRl^Wno8&Q_?r0z#`?}q` zyB+(x-TS#Mk9ND)*AI3(4t87CxE(jSEpbuKi(EuV6ocF>l$mZPkitTH36mE}x0UBT zE0uk@Z2rkvZpa6BrGUtW)&{?7JuhOZS}erM(i&PmCO75!;XtU6-Cy2;Ym_rjkHCEA z=`Nu@FnX-3Jhw|4D?ZW-o-YC%{rE=3%NGH~bAuKrvdjq({V-$%dG3u>I(<0bD#PJGN z(rQ-eITT|*2;%2J5EQlkBSh&L?Lr%dY@k<9$c0qViEbRl&^q}|kPT+2qr#nhJU zEe3}=dJOXjDc*7@v=FP?*bVNWpAvBE8DpmOZl&qW(*ekD@THS7tcL9oC>*8l-N zZ#4rs1Nk0k?G{Y^=kA8aus@}7zu}%e3}`$9VU+Ns{xZl!zZ^0p++y=#g&ZW+uWyEq zwHzi~yuNGdO-wynQs3M)^#-PLI1tyb?wX1V_df35cTK&8spkIeYrCf2%GBXP`ITK$ z(JqAfI+kq(w(!Wo05%C@=55jXiwKHA$P0k)K7p~3ed?0 zh`U}p<;P_W$Q=8($Z)&?dw5r{YJo!A1=L9fYU8d@+(`qmLIFF)fSIesP0GkUHGmbM zo>A)yLr@tGhpHf-5Kk52nlf)kg(}eC+aaMC7pO5T(oX}grME&P_r|UgzA=z+TwjP5 z-_rJ_`|VQbO3A+k@<&TL8!dS^W(2({`|Nc4!5*oXm$w$9|sGt6eAiazVA zzVba4cH=|#)fgGG;yLoIb#t)%P@vDp3Ih7Mv^3Yl0McAt(qGv7RXd1gl!p6k=7P!{ z_ME96Xh@{)(){ItW&eT-h9iG|p5@;h>)ZtHlo#oR%?(_6Qm2-2;~NKQy{lOyEDA(e zKEvXg|3@tT^-*7Jkr^I)aO**h7Y>hxp`N!Isw0CjEH=wQDqe5%BA9wMN0&HD45VCT zRx)}w5{GnYmJNbz{TAsdX0+?yaKkXaNG+6;2Ira%X6MttoTY+}{umdp>}Hy$FPkMf z$lO7Hnz6}G(U;8PQcWJ9Lv4s=&Pw1{PQ5PRNtpl98P~Pu5bI{N7I!gO;59rYM9$!t z?te?*ZWqEkK@kNkWFr8UyOx%%0fUEhBsUg$nvh1<-oVh1gYtnW&b2L`*v8O zJ8-g^M?|hRI*fLVTZ?TNSE!Gz7WV02v0g?P{6jV)ZFm2Lr#H zA(j%SS9wRuKeoP?@boW?>WeX6$KHN}Tf(SqwD)FOu5t9G)YE&wgz7^p5wG`(tPS&y z=rSpv$nG+jR2WlPhcjcII)Gqj@RNC}prAlt&|#wAQ;0XW1<<8tde=yjoOCE)G5kah z0FlZYh8T9rYU#u}5&cOdBeMjGTW;{`1-Pw)LQll7EQzBD!V9u;VGxtvDjf1+nfxaV z=;sRG#nc#J)h&aGF+(`J47(O{Y}luM{_-n;uzEw`-7(+byow<25GNZzVYyf7-KO&@ zSULos0l6Bo5N?KCE)*f*Ap~JTtd3ow`P8b(%b2@CcIy9YZ$gWBBbKVxQUZHQ*$%-0 z8w9%UQnHz=cP{Jfonz3Swpi_9A@@$yAez9XGgkZcc9M8_9v1PpLet`Ujnt3~Auch> zFN<_|a3Nw&v+y;exSfd#*a`zyw+&>J{=$O2EMRh&ifbHkeO?o=9T*#MB>tkKG|zyb 
zM*eygAH@Ad{7PB5zh-9o!aWz(Wlis;sn)_IU|`UP@nScRAmN-5Re-$)*HZCn3YOH$ z7a*cmmf+;TFlXnH*04I9n&U7Fe)ptqtd_M#Ov9m2cr-?Bp>mqL>jhZpO6t|P`uXoy zgE^YRcCFU{Cb(|gfz7`j9>wt`YJFCy3&*`l{USBftO-Yl>~idEt?)#NU>^y!hE2I} z%+1j2W?-Q}ZwRsLjOnv8c<`qdDOq}l=y5WE6$-sx`eIHZGe}2o#Q2T}5BXM4?hrQ* z7qdMaan8kT#`R_ac>pp>x*0%ly`MnfbDsbBy%>OEV+jhhSkweX z;L3Yp5=R%fa+eqBwVHREm#i0)*RwL9;?X#O>taLVO!N z8Y@A%3`<5_6RcvkJjN2&JaqRlwi&3OqbGRU+B{TO-&uC}}0ZTx)@D-`=i{};e`uSsdWAF*EE~^3_+EP^S zM}TVbfg8hRp8gw{$iYTygPVCUSbTVXomoO^fCzq=hocKjB!z?R{Z4?O3&EB#ipFy7 zkLQzcyo?mbLvidf7q|M$MMZJSBwJ6zBOC+VY_sUaw1R3kd!PN>iYfzjC}8lu5OU$u zF>+7e5h zsMYEkF=NnPjDc~o5N8qbiW`Kbe~I$+DpqeBXW(>d>P;C<|k+IIz9>1 zo$~DGc3kdOR=OQ4xdF%(QRawsHQJ%>tXnjypP@2Y`UBHmkWMmOocbtvsED*RBbJ#t zA@BFVk7hkkk4SHFyU5Iy3L1? zw4mJs(~ncRF&A$OK8y7y{{-aybEUHD4CeDtddLe4h_Wu;U5}s z(t*A#BL^vc*?{p+N?+<-v@qCHn6Cc+Rwz)0>ri|)Jcx194y-SuH(*WFOEGTni-8d{ zUXlxn@$g)k;mTJ;Hj5|=ka!a%LiO&q585r-R~&VTO48Y|;YP~}F!>%4(79l)L1uv> z>-f#Pt=Njdp(M4;tCQ(b8@&djOd89TvhcOzF-k1Oz{a^UwWg zRNO*zn+?kvQfFZvoGE#WOr(-TWkmJlBnnL_H1}3aIS%GP00C?SAB>e`D38(!io1<| z6ITH9bmm$vn98iw|GNPF6*6QC#DffcilHpRoy7hy%09_BJPFS zCD0AXJnLy0&Y7tt7tccaeynKAi0Q4gJ$){dC-o0z4)b5wEw%;J-)ES!24`bqDcCOl z9vsL*o(N{#N45i&*OMCX_C`gSSy?;lv_{N2S+v<|HpCdIra+1y4UA&5tRV!;{`|8L zf|BW{pEgEx024I~=mOzH%7Gizm!7r_GCiEKEf$OCdecExFmxk0jAQOjvHv|yf@W9m zW~6d2`TtP&<^fg}N8j*S`ks5{&cZP4hzJM>D9ZpMMpU*zM_GoM0YSm*FdH|{#+?}! zMa12>M0n6dqjB(Y4^a`9#6%Mk^@+xf7^B8S#keGD7NbTJ%@f{VRWGN{ok2<7_xt0U zM9%5zs_N?M>gwv{^!0G{nSUDZVT{2oNv^+?TMBpQA#$g2M&8-;lKj3A-Cy6;b3p5h zbbaLj|799@;RtyrXL({kIa0K`tHw<47@Tb7B#r`b8-7zR8UEk3A&Q3oc#-1_%&M__ z7ah{TEwyYx#)k8@VwytXDBVhlbB_n^?3i5qdxr(9;D8=3LvKdP3Q-j*F2!PHVX1Z$=?s60`EXo%Vs^FS62^$6(b zj(?ad20(J#s^kS%V{F9XGTm2bVHgg8Tk!|~FM6VE z#rv?>MYHPX*2P=#UMw-0_Bsjj7yBP=AXmc2X#A@K3H#%Q86T1{{1Bmo>o;;`c=t9L z^yYw6E@!0Y=~lcOCBem5a`Dn`dV(7oBv-l z3{->5ZtY`FPhgRwVFEM&O@DL4`8{izjn01+BIbwJ6-b651w*|KNethUcwg_kXYqEV1D9J!NK3#mtNfdw|z6rOQNE z&Wn{eS;J=b6iDG7X2=s-=h+9eu3V3GKjRC=bBFUEW0#cYU%mdrU>V(JJO&4kNomW} zxeY!piu5nBD#jl{LLIzl*CBZaTbU7@_C$D9d19=GBVrgJevCW+zWbqa6Ca$f+{8`s zalde3!fl(;kT~c#j6(P_2c@NO=s`ZQgvjatZ=PfW7!Li9=v0kf39jP4@^>7op&`Dn zbwSaU*3F%f&v}h8Zz9v60oun#C_;H`O;48kZsE_B;BMg$vj|t@wWyjGtf&|c_8SP& zj1$Y3(H6?@!}}!PrSms9J48b%4%ZymZU@~hf=0x?OK?89Bqhx!_=y=F`beDC0~3cs zK8((aMTuOk|6D1JDh~d0SZyERE>nqic~s1rf1?;)(mFUtg>9@5slv8@gIOeVZ}eIJ z0iNA*i+8`3b^_gLRmPQg5-o~`gfBxwVj8LHot1lY+>kB*krX%@Tj(zYa~akA=Qk1F zWZl#EAKgT_m2M)uQP&LXpwtaMi?`@pR>ogVi-xo=xa&)G$Vc7$r=dfsx2Kr*Cvo0V z(SI|%?_#9C#gS&R0G|Mf?ul^krhnKL>Fz9aHu|Wdr~b+IC_TOGoKMEOkqT80=rR(u z>*dh|43h-YrRWL@?CIbVNV)?^cTmOozrLiO(on;Z)Aoy$OT3OO4%fF}8kE3|bQsqB z$*?|uA=Y^OBXA#-Urlf0>8*<12GLt1-cAixJj^~a@vuBlOFR8*aYdmV>wh3;w22Zd z`cM^Kpj=*3!=9yk<#C9QDfzIxE^Y=nSo4Go}t=8NkKl}Xxxsvz2B0INYN$%=eC zF+e7NNKD9XRg+L_urZTY@vzu@Dzo-k<#IU;p&${^hDs+jXrgerT!5o?!_pTZMe2@n z387XgR#XxK-<;@!FBgc9gwTQ}aCjPKaAFul1AAc_+1mKdkr4kt7(l z#MUMGK`RoBQ(m;u<4G_YNrKgt#0m`kOEB&nE{U+AJPGc%JOUaPxQ&>?PQ^kgmc^sy z@p8l)uEdkz;hHlFq^~ZpK1!7_1)?&vOR9LA%9N-M#J^NN>p!u-#MxVpJ3#Zf-i$YThh%nEp<+9XEs~k(&5xrb<}j!r)yhV z8ZwQY*?OmP$?WCn#Vwg8r*?H`%bDqVAZP2_>$7X?>zsAjnznR%N4B%JBi&NdT<>I8 zrIFdz+ESP9Xr)wLO-D_-V?$egUAlGESL=ayR!vh=D}F;VQ{PnQv~y|evKf@Qwx+4G z-f2FurhT>3QopWtb=GMv#6wJy`rm|jvnFFl>Ow%69Qq-(N`?derDb?JuI zY;#SAlyu(wblI}tPZ^%O2 z@olY{mX7)?GCMk3>W}5$xXjcdS(B|@eeC+_)5cGmI&6I7u;ky zoN|~npfdlAu34XMsc-Kf zHMZAxIBhlAj>%4CRdwZ(Io0VC%Br1ob91`6b!~lfO*>k-Ei+vzbndL`S?PI87S3@h zXH{0tTAnUjFuQDSdVbkLrY>K20>5X{_3JyxHmb@O%uO#WL$kHllNQtUXsot+G*o-0 zHl1m1YjRfCudmBAQad&@wbpc~hDY+u`VH$^vvujl%-Z^vbej~QI?d%e*_upyecJpC 
zX}Ug}ZO!WHt?F!OsL!S|Et!r?O;ft5zNN8aHCHsdipor9*#_Af+o<#R`-bLpU42`}YDv)nFtuyCy{4hwQF^U!COQX-%c3^x$SUz) zeGQSUM)Oc9#x$2Kgubh)%jPQT+ScYauyn{Gw4z|-)6=!9YamxW+Nx%4z2rA<_L6z? z$|}=U)wAZDoSrp*{-Qavh*JhVE&bK@))uF(AzQCHY<0SPLD_;uPABX;4IAs2GCAGh zwAW@cZ5=GWE{#o%WNmC}T?Ngpt;uG{`kdyfjw~6AVvee>)5=yIxuuB}+?dU@cfyp& zPgJLuESy_5&si(&P&HOPi-%fdYc?pn9k&7o>QwwjJw)-+6(igub$ z(7!orQ-Ndwh~}1}=Cx2XJV&O^X;2nG&FrMpYbV!rI@!)u8`A4pDe3g8&P)@m9~DeD zW!gKOs_OKdMGLFT7cMDt$jhWnVQFqz$N;bA)UsRC$|#dIZoDJR2jx4fGA%XP4RDRM z=)LvpjWCdoeO-H|u|-y)Y~dUfJGZQI*1{9Y(hFu)ootZ7w7s(#PPM6(x}XzR9$Bu| z@w{2{tI7;rP2Jj>mfCvqx0Q=#p9F2D7tNbjRfc|3g|4&QX$*?VK}Njgs(oWQJ=d zP33>7Zfhn@8BIf)iYYain=+ zrb#tDx(0PmRH<|l|69ucPUe57@V{K`(j%Sr_WH^2fa^ODWvJe5A_6!h)H_o*86`M5 zT`HZ?O(aJs2YsuxPB{`~mmS#+Y?Sk85OEsXz+!E>uA{XE(L;S*yY_J0nP{M+*0%;P zj%(1a_3Y+l{ac!}Tjv2ZP42UF8U!Ur-AQo~$3Q$pQq?RW!5uYkE}c9*Bc&qcDuv|? zoXQ4K`Bj~b+Nn~*uR{<;OgNoVl#~8I@xPUz5NSmc;@+hfCJ6-<;i{>tY z!=U13WgDe!A{uBX2-z}7*W#UrS&q0`WT%nz=*U%2-~6&_L=BxSar^3X^i(_ zkFN@ySH5iFk_F1(c$BKEZ-SSH$T1zMG9m*Zi7IWInm2!zF}`%CmbRw7Hj_!Wwlra6 z;uuWUwrcr;*^B04@)I2Zh64Y1gvmF%CPp1%r`ywd_x7GuhhCCKx*T zo%GuFHaPc=204y7X?Cuq)0}n$r0_EHW^BRViY=FnBmmN+bzb0)3p zc61F)qv|o+te=(1b(l2Q>aI!gKL_)Z zxw88FPE;DJ)8FU^B2vEV$b=rs;bxo$Vr;H19j;2G{?+9wQ}TbXp*)OYljA!s%hJ> zX7^Ic>hy`Ts!r@l;-hAl8xa;_CgQX=wRWJ!?O8;eRo&)%YSo5^GfdjhF=HqHa`+p(@kRp~wD48e1teL{L&at9)T~F4N3<41i8EW(v7E+EwVFTn_1a zicf0Wj~A$K?`)E@Fu2KdduJOATZyd)Ac%#@n$%8u*23lK%2}t7?MRiA(D8wrfKqCc7Z5vmk$%z?bOB#V9 zF@+gp=x%Styol!IJR3%n&RxE6)&k?{*~^j}tHJUR#wl2z^d#WGA!0*j{p2RBoS{3W zW$TdGQ!rxFe2JqC!m|d&9v6qm=Q*mIDxc8q(i*$(63hCxo<=cLVJ#(-+ecbcJL!j&~BC_jT~BPp^IF$QMWv(qLNq&2sO z3}|TG6zb#AU(Et};#!Zvp^i+U6;J7dtT~}^gz<)#@dY>Ykw?+2iCr~?rO+}72(MaM zRxy9poHDJN2CV+dm=!gN37}P0S$0C%vI;Zk$nZ`L!CDPmOEL=SLfE~MFCg`BgY&pd zeMb?>g|$oz=2Z}Py_~U-=Fh6nwz4v!L8pOP5UU8L7Yo~rTy?FmquMl<8!3opOM=3s zBjH7`wi+zuQGmpO6u8w^$;oY9W^J2OOa3Yi4PbSslbyfrgcPqxL!?PN1thDR>pQTV z8I@flV@R@Ej#bd+Y7x~A)Uj$R7kwZn%pdESQVVK)Sh#2*Pom6PK&FNjffZ${7M#O6 z6s;yPTK_T-)#|4u;dUP7GH}MUFhoW-drA3xEOoIiP({5KhWToJb1eoyED$)YSbb<} z#e$z4OJ#6qrl1b^!ZUVTU{V8oMRg_dZ9t^R3&2%XWs_xxqG>xugGMY+(Zn^=0?Wwg z%>Zgq9k5~v4G}AAA|Avnoi)E4>w|JN*$!pZtcI*&L-S*vriG9&Z}S}JbNw96HWXwh*&eaGt7I(S2JI<@ds^>qk0u;f98vxwK! 
zcp1IaSzBMreG3`nPBIA4Xl=;R(;uE`VqoJ59}B?7iDgSYlhO8yN>C?-&=`6qgTN?6 zu3lk=3r~uu$i#FMt)g7%%_TO?rzs+xH5+qsWm>b#u^+O4FsTzvW;1cf4zPW~47ATB z6z;Ex_jzFoBGK9xCv)Q*yESB>qt;m#F5;CIU3w+r{6!0A&6fihPskAPt6^5hfC`zH zhUZn5l||Em`OD!~q1q1E4#`(v4O5b<43)EKUqP2sK8J&szfoEN6Etq3AXwPuwN2{{EO=S5k{sY&I_ku5%S zwO|v(7{wCV7-x1{Y@QZ3lSQXOKasWbCKd*h`nJyLG`eDQ56d~cOv_E#F12r>?Mkww zy18a?9bIQ$ISjS@R1A-;ozyNghO!B8B-fDvOiq<41Dykrg7R=2E%7wX^0CsxY7>zC z4xTRKz#`@NB|5cx(w*wRn1A`Fy9T9e8zye8QM57ienbd;$i~QGa|8C3&bt)gO_x<-_|RROwoXdpyCtRZ?UK^?eo5)nIPH<~r0N+kVdfm)GbxR4nv@u8-sPY`+2m$*n*p!apv zf*A&C>~qo9rJ7}8)nB@9=S;*?HM*T;m(pVo8i=CA_RdwZGqF02Uz9!(LDn+8R#g>S zh>`Je8ezMhrpd_7Av?rahz$@Kx2PQvH4WfGs%7kk>QL7wGm{e>vo&Hy9PA=@WZIfG z$PgPtZYIlmqge&cRXB21YtS(pu8rD*_i_QsI1nw{N-NxI1s}k6%p}miuioLeh-&0ws+!B2ZD+OWgna|-eA9xX>20(~tW(qq*dB#w z2)MAQF4rdExoXwMd}JmxXKHP#TPp{DUKQu*E2g^K2Q$zQ7JYShkri8oO#sXlK&J<8 zgn`(qr3oo5Ch}H^rokXbQ6#)BHjuwsUz5SQH(CHYL8{%b>t5!dkMe;88p5$2M6*3P zOJNtJYKickmMRH@b`t1p!wJQq z#WeIb&@^z)iDh$6cIp?(Qz0^(>g2(MMU8K^sZl{TQZ!6coQ9!NF09KPqG(;+nvA8_ z)nqX_M&nVJfkDu#vY{5`faFx0tOYf(*9hn|5O9s@N=5T#1l5^Wv1w99+ksM%%7@ny z70cFw*-dj>2X?V|!lSLYS$7&7=iqZIGy#)plAEJtRdZ%x)r%Fb4`A?=Cpv!-w_BGoXtxqdu))x^>%6IDz{1j->u8U^o8!^1YS)hHz1cqtxBO2KXxJ|)g*e^`)!&<_xq zT31fF50Idi7RjT^O~k8I?g7%9&T3cr3u&?M&|EiRH5QDUQ8gF=-j!DaLX0OZy-EGcqVfb#ALq3Zz}?`pY_-e44swRlmB$9R*D zaXk4&q;w4lmxpP*$TL9g1cJbr_0bzI@Zeb=pAfRGVJL=#}kVy`Q=#Ycj z1F3`RuTnI#iIRYGJ?PoYNcbm98x8c7<1@YSfULkK#6||ioi#Xxg;$zd5GpwfJY~R+ zmwFU{N2V43te0L1Z)#@?Z#1~loC~jTEhrK%SkS?2pI+$D2xXD!;z9ExJa*jeI0+|| z=T4ol>|*&Q+zNgPsx`m?NQi&tx$?&agNn-)rp)-j=ZvsXgPB2iWjuLbJo)Q*a$hXz zIKkK}bQwt$p8QtQT?s(=j0VdxK8hy?U8#sv6=X{P8|dFQ7tRk{cZv9c`&&=k>@MjN zzxDp+0q{3(p91Xjx_kh-@T>L`W4G8X+$F&o>XfcT8p7Fw}QP?Y!F56U885IB1-{_9E5r-3Zk4LsN?ErRvd~Idw-vIe z%2-jARMZOG)f@ihWLZXkv32iRU5=+qmy4>Wy12Yuw z`|ta1j}?h{*?hc7U10RCM^9owPvU`Rs2jTwi7ya^MFo@JdGhUdB+yE5d&rl7he))Q z%$5NWV@^6E2zAXIcQI~sqFM%1)NiE=XOK6&OFZH};^G<6A_0v#j2uHpHx+q_y~*M9`~N}_>J1j!2$I+{ur@!=hKSp0Kr2UO0QLRJi+N{sFR(+wMPc=d?9f{<4ba$I1E9OG*j>?cAk6WNvOj3*i~ zM^b03s-muF)WBU)_F4-Ol2%paNUA!Xh1H#%)$IZcF_#@lr+S-(wI6of5qn~XE~Z)a z1$T5Yom5?Hg;E10(Zx(MXg6JKgF-n7DClufl1#-)(rA9*e}VqiSU4px$P)bWRfTxV z{lJx@^9Sz71o+s!PL1E!dAL-iLfqm#qyP_j7@{gLMEzL-{_K5BfRDW`zT~mR-{u3b z&A&$h?xCyXE5!Z&9tGIrUrvG0<>FQq6x=G_B*2^E+JxkBZQ{0sTM_u30hKFQEj{SJQA@j;sNZ7L?>%@4@t$l}L?v9! 
z#avCmxsHpuj!1C@7o&oIpwa&ivRy&L^#l#-3HTQO8iWJl8Xp&LSBV|6ZGpvYyO=8v z_#XdC0jw+0ut?o1UZ>RSs5nww`^8qZ^N^R9f^}=JbD&iYK33n$3uiA?cdz>nRrekD zk7UIV>W`>6;2*fSV5mx5<87z#X1n(Qr5^D1P->6&7Ny?u-Xk*x&3oPl1pmOhf+~20 zkDpOlEVlZ%qr6Joj#^8s9sW*&@ARK0_|yK^sDfV;H&JzO62GC;Z^YG9-K!J-oj}ZB zTd=CGUCp5A0+|#JepGOUc-}=QwgS>!<>PS`q`(DIB*&!*cwfydWrmC3e++#QMO@eT z4^vnBDY_aB#jYPdq!1jpnnN#&-fSvxFX{t%ukrCa=v7Qfp%XWXjn@-*AlWGX#xW$p z9-m$uKYv!$fLRvP)+Dy5(tEMvm38uvB7$wbm%I&MX{RqIi4c~>>DoMnVjlBSDeSK1`~jRR3>5o!QY zBRN)4Q;)(-t6U|EB(o^V%uLeX7&0>rP@-hw`j3)KZIb@RVy4yr7R3oSE9yQF1(P40 z)O}qxZLnKmp_^e?deOzNg?{1iBPVzToj#7eSuncu29M%Wi&==>Yz9b*ih9in2ZSGf3+|l0BO&h36~dV#tD_umToX}dPeKe)q)HPyX@r<*Smy{)tul-z z+6-n7nus!c65@)u5ISju*lAej2=Q315PJ<~58W=xWFfkBJ9n;DW4zl%@SJX^GmM~< z42PUv#nR|@CLOj-ig!Dm!`)8h?XTM%5U%QW2ZYNOzb9^%hnl|e zR*qRpWd^$!Ypsre&mXZIJ#l%OiA1^+iO#b%2Z;wi7m@TuV9<9Zme5xc2?B%k1|&8R ziCK0C{%K0!-Ks+{5k2%6nsExp58O&!e33}q9(0MJeDTn+ z)bVrp@{DchIG(ZP=AnW^$H6Om9xiVSj4XLPVH-3`PS|qtv?PN@X-RUaWlaW+k~MKX z$Z7G0oP$P^XUpLWSq6=QEC_=&HqAaTS<^|Heb8S}5jW()x{1O%%aEmF8Gp+nnV|g{ zH1a$YHE%u>iTU+N)Ssz2Xrdq@;YL0Q?RpZCppoZikjM~8)W{E+5~4;vPB-$8OePw6 z3pMh1e#)k@aB{;O^!MpaMML3 z2h?<9O$j|T-2{_~rrV&J4o}&1R<2f9PSaT!v8J=&G%e<)!|a%w&dQ55-4r8B_olP* zv?MuAXJN>iP}5OOxaq9iSku|?sOdCRHJwi8G+mpK_kf!&A~~R@yUvu*L(|=8GSPHr zsHVeHD%r}_3d?Ca3nSKa7Mzw5xaqK(z)ff6#hUJBBTM(Dv+}egIZbC_$eK{oQBAn% ztlU`B+3=|8G*mU6PUbY-UL)@TH(f+>Kuwp|!>DK+W|7H6)0L~H!&55R%GC17Ok9CC>mZq^EuKv!>}3b9oZ#DkH){DW>AC#CX{MqwNSx!^yvMTBnbiw zzdLJ(Sa(NMjEw{&sagm5a(3Mjdt!&ZVa+N9?pR~eNh%D-FTQGN346mP8Fb3M;Y3k* zmW7gxCt0fr(*gw$p}@>_5sF4xe71nHqNvc$Tr36;Lth*=LIttRc#jFW$AR&ay#bu2>2b9MRX@%e>h^)BRk!PKFjH@QnJIvxLZ&ne}cQkfH{ zLS+}N#6K%fhe%7{Db!!ErCh!VXF9T{;|FV3h_889d5k8m@~#FzBRdv$F;r4~B~Ffr zDFc4xk4k0Of<@LuDMe#v`PwdAYb6#wV z@WY}Xhx(0so-E;cNsqRD)$77`uINfUM8_QfcqjpeD=db5#zPA+PAyo<8rbE;--A7^ z5+Izxv|T$G4ZK+V1p7T6tocP3Kj6GrT;yFrk8gSR(c>BKH}v?EcL8l{Z}D%V$IJdl z^teD=Py5+B#G~~1rQF?qUwlTXed03gZi_n;576Vu#INY_ZsHR>;xaiwL7Nt82!1+f zt9P@9$GskYHGCNk9lS@68|VbZGVwG2U3&b*-%6KR!}V!WC(-8c!80#BQ*oowh}?JZ zW5d3`AMT^m7~6e%p{=?1RNfeLyI7J>)a+BaeYjJGQ4%Av6CS)qij8Q2Xrm<4PE)xE zC}7?ucNp!+V)s&Ga~jJDBxtiHai*c|U-w>zO~hJU8K{j{CGmPPwP2m$cQ|bLSKhDC zDGWc_y@99Hyiz2W$HSdPH=Kq=BE-{B7Sh1j9ku5ou%bg%i$v*>C6Be;QM;oSDp3%b&?9ih~Nf`_F1YujSk93yn?x6E`>i=>UiK?W`x6yJlm$*eT&(9V{HMA&FG6k^gT{3yLuh=I#O zt}}(q<;s|}EF?;DO-xeN#H3Sxja`U=%R(Mfg%nAqQIdxTK3ORx-2cCU{uNcVn)0^m zZ{2dPc-y^1dE!gFS3?H98lIoWUR<1?_jrmykEfm}kf0|Dt}bNI)rIfLljh>R!cU7B z^l8z9#S-hm;i>?8BLVO`|0MyyOX8+b0&WUl4GAOs z2UQ&y|3K9NxVn&Q2>{g+098i-sEz=r8UjEy1VGgf0IDAVs&>^~vD$GI7wUF8I_Cd_ zRJ&|j;ncY`;u-gI7mqvq-98@w>~(!H9@iIt2U_2JZZ{(c4l~4apo=*tBsLZYhn^^Q zy06huzRUeUdZ~e*S~sE7XJlvY~Ic#v2v=z|nHjr?7z4&zjQEKTwRFPVZSc=K_(ujthxJY{TV z<#C^&OnQ;IJ?d9@TY33!pL@giB{MY6s$*CYd6Oviu1If+E? 
z_*#MKZlv!U#M6Z@@=?4Gi>~QK>BoEhmYy7~SaS9l%ylc`FLaC#5fK8i zO7W5Zx$w9Cag+F5=%QbPx&U}B_#FaL@w?!bJWLCA<~>i3=kv5+x1^A~CG~ttY;q3i z63?eTgfc<)Ve0jKkiDLNV<8Yoa6CwEJha%-PD${f-+C#(a6To!^qhAqd81w8uN=$< zcP5c~JoyJa*d#zP*=TH9FSOp(A{;g{95#A`GonlUIPqSh8}OT{U8!6k#x?w@Mz%E+ zY#;ws&6JN=#1izz!z)$J~=a)+I|H>C2;a;A(-t#F5QiPUDd0A1^9p#a(-;93p- zh%}glhp6V6f$@mO(xX)xRh1=14m&u_H03vZ@0c#}ssHCBJ--(2$fxJ8 z^WQ9>=e~mfF2qxxRm&Q(`e6I9T)V&GLrMMqMDiv0E{69&0hdh3cn}^w>`BO~zdT;-^!0pt%%w1#n6^139VPlzm~K4q;`kZ`VmjY#y~1x?{407FQk>b3%xE6sV;owiqvnt-&1N=@TUOlco^%Y=r)7Xz~D;XJxy#$>`LIV zEBJBJJuMQ}yakt=VwWK{{+at1nksDbZuju`gZI7{bU5cy@bSL)8G%0Y&iApb&Y&w3 z_t1wp_XK|n@c3KsaI#?Q-c3R2kgYFo3h>nkHR#MR{6YQf1}$aikIU3 zXYO7Kl=iwGxeWZsy^JCzE@=fydf0?Ydidn0RSS>2m3I(&{XrxAbHI1RGQ9tcra&vf zcMJK0E#3_t=iT7_kka=A&js)gYI7r!aT(T^S)$Wr(X_Z6o~foj4c5dhxYB6xc=Wy5 zOYzU+ROG4Ru3&dS{gD4Gyo1q@2M;Sa+qq-i z*+I>5gZqFRhg=x!4C0Vyf;|BiWX>Y|J;6HxO8hw3if`J{GT|#5q)5$JIE*DvrFP@L zK;sCfAkKO05t{E2v%$5ZInEPD-{L%ZK~QsRwtG&X)rm5&l38%DzrLbN9KQ%s(O>Wj zGtry@kM%S9zIk!?oV3hIa}LveWaxZt!F~y>v}CZh-dnw&!Ju<_@#^+awLBgx9mYwe z(>ilpF6j~|Q&vo_+c6qpMst9SzZG*OngsC6lj37cP5{}FxDT!YFI~aon4RF|&EO;X z@_6!TI15ay@I%<<&ti#b#Y%Ael&ttuFqYF5o?xs5J4`SzFQEy>N*TEK#A7x(*z6|E zmelm*N%|bC5>jE7QYo3JSxRM049zPhj#LIoB;QCV$UqWFKv!^AAm8o^?kB%-fAC*w zrtn|M>yrRnpS)86NOH#2MOn{ajG0@#N{jXn)57>-e3bTI{=u7^VH=$hn=qLla;Uq} z+2jmCcsS|sF|O0UPnYNtuhH836~VUyJid)i(x?60(spM^yJMs63I0O)e+j;k>@j~= z@>eu^TJ2&LX1fq)#f1H9kVq5D75I+F8VT(4|{+0RL)-^do1rk%q(Jn zYZKcOF~AkUO@X4mDcBXn^7aJ3(s{oM_QvyeB_B%K$k_FUgQg!!hd*VPs{Jb}6`W#J z?U2`Y4x<%_!w$n3bl7zKQd^gpzQphJy2O$eID^gbOn7|YUm_TIiMT`%{w3lOLA!^K zi0A1e&*#OvbhhAKaZiGv7bI^>(ypTX3$8LH_JR%e8lKodZcKdNe-6`nOXtBhgE)vp z8#D~)!zTM@!HL}OKgDYS-w<~R0x;{~B}0Idl_Kcncd8s@%dzAfI+J0rp~hVYnbli}z)W#Quk{LUSFt?;U|xmc%Aier*~ zh$R*J3p9vaF7~<$lQ75NP>s^c}j{q0@ms9+9IX>TI0C;CuqID3n)LI|F`D$1CJh9d3^i&$ZYQNR4Ma;}z=coi5(QFYAt{jn zuv#u^iW%_e%g(K-t=(2+yW#P|JEQWOS-*{z;H4hTzrp5a`7RuMnJ9|yq|xV2cNayf zyWIQK7xDMG_#MfW*th%^DfV05^=jwgdhdI}T^T;37WxOH6XgC`1&o9?N8V1s;O&w# zmeucbucCnQD&kzNIRnm|S6H0oARTeH_#Oo-KI&Hb`uSGLcQNtBQGzNQC16njV1`u| zL)n!z!{8mGhxt(U>W%nk0Y-rb!TYTO9#HT0u z6g*9tg-GbW@QNW>PN=W?uXacE(YzLKY#}jg7%V)=^(|8LZ&LgMV|!Vp@ri$0O0`!( zaI7B^C!WXM95kobgoot$1An2b?}d*mY(z`MPAC7Q*m=QE3_Ei2VgQQ4UYo`vGB+9O zGVv7`e6S6mrF=ULd_Nc81`xiT20jjz$NAd;!ne~@4L2O24g>U&=g2I8Iztvi5~)D2 zo7Q9qFEN5dTu^6dQ0L|&7e(qMf79rON5_f;wE^fNZJ0Pi`^X}7QWdF_s>sINA`QSQ(!fM2PZp^msz{ww zMe3v~QYTfBn{taZ0INs?6RDIeQbSacI;o1(NmZmya*$skpp)us z!>7vIsF%P3i_qWXU*OWZIR4bp z&0+58p{`MXjN&}exZTjx?Ru94Td){JBZr35#bqdWyFx?t{#7;-%kWO9OQ!ELX=gL| zi(xg(4-t^QmG~?{FWqusHI#>G5IF{mZU6-_ltm#;3t1Vt{{+7=m{Xv^DaZO7od75N5VBN=kKBuBUG;MBR(kC49;e4s-rMwe z-@AxD#=P9eItKFmXYe4~aO!0dQ&RrJTjVR-%vc3adDAG3ifag6$y7}@7H$154pM-H z&dB2x9surDv;%1-Y^PKHckD7W-T+PbtRn2A2u(WW-(nMLI1b-nO$jBuqX-XDgeINx zAG8TIe6S`A8f2X{p-HFwoi?F{8^X~!geINx@3RRt+z`&oAvEcf|By|n;fAmwhtQ-` z{#`bqh8x0b6=83s9FtD@x7&moj?)#;DhJ}rm0pE2O?DEjo^vP3%_VcEm33TdMz#{s z_2*84EHeGvyod^&Vx1~$pz0(h4W{}p=-((`g@Y@XP8XMlzXUAw$2z^n3x7Ydzzlc|YLY;+^HJjO{Er2WL9+-l^A+r_l>8JY5-Io6yenxr z?8>}rIeBB=&6K=3?`}@skMk8sK9u(uCx4#z93`L2dxewt(Y4EY3 z!X+Ofe>4Jh9t9#eHjYHYSLGfRdY33m<;WLPgZu7Fe8nI zbfYyHOba_3u40CH@%o1=6+GgpA>DYI49)XCMa%9IWtg<>XDzsNm+0_M+JtsGXV}wt zhVb|vX}BRgDu>Xd zbA~+)H-w|qG)9YR(mA7-hD$dSm<1IKmwTzq!6k&nveW6LG{AUayj9J6;u{sYvjmq-o=XAX73Lb@x z8qzIn)EP2t1kVMwiP-6!j;HaAi8R`TkkzDfI-Z6b!t-(nO**IJX}BT0K8Mhxb2^@e z8^Q;22u(Vt<7v1dd@hI3q;oo+h8x0na|lg3r{ih3A)J%b@k~0W<7v1gGzUSvV2d&S za@l!qcVAUq>l(UMmOIo(!4L%g!MXnk{)Oi`;>XD+xNqckNt}Of6K9@+^`ZDw_-oGT65mfePM>RmUxmUum>Yvd z4VD|)q%J;*ScK4ZR=zIY0;m`@*m8nYxp4SH(ONzAx8e9Ot`-qq(haXx*kvhyqH#BI 
z_^>^aAvy8z2MWKBjgRTQfn$%F(SzdSwH*9O<|3?yEXnoN!R~W4(t0crt;a>MRk#TD zGquz6nYWAfM|a_BRi+jX`?t~OfBzaQsc21Le}6I7)nff8^pWQ$k~NlsKBHepsK&1& zeBTHB`?3VUANK#3<~9E-E}|nz7bPwZkh(b7N()R|1C)NcRXUyH>G{tp9p4a&akv0? zB`(1I%u8QoU2oC5!QS&UNil`0#RK^8 z)PTXL!r&2D7#T5{0pE9@a4G*ey3leZRzH5@`sdJEgysY;xD!+JdI6sp+;OgW!QF-1 z-M@dX*yTNo11o?$EB=fZK>mz_^=SXk#gA}+<@@K#4Rk~a#ySxGVx{LV)FyvPT!>cS zRU(vPqu@I2#6B9O_a!a}yiLWj{Mf(2XNKi**tS1zHza}}yXizuB_^7WO2Hnbvjvs- z=V{1;{HGJ|(;1fc6My7%q;ksT^IleUdqX9=ydR^*>4QY02DxfyLx2!=L$Dz{2@AiX z8UM4$$EIto;6Hkp5pm_ahW^G@pu%$a!6XvS_s+HxHzW3hN3pRgpu~ zx`IiADf`=7jZ8SuAYP)I!g0(fFVt|g;AumE5OhPZAwZzG%<2TxEY&rDBeKz5126{) zrNi_#YB!Y@MJg@oZK)Jr`utoAuU2I(2$*BpJqN*t0D*c;@HfqaR(V!-i8s9S@Fnd^ z>QM1qv|tREhM6WNFl=%rgOq@g^}!EBu%qT}z8D2wWln-@``4&PB_X^S#xFx~%O zP_a>F3a1neb>cua$oBS_-As&BQJ7kKtq=+~Q;Y z#k5(1VTpyPh7db&o)0-Yf(K|sdH|(TIx3XiXH=+F5?}7HjmDvECcnkpJab{vA&P-gRXs_|!$)%N~L5rdL2aP$OyH5vh+A72hJWgT98=w-nkb&gs%{3%)CfDqYN3px-r;xIxLhaGD`@VQb=!KamWP>sM>`M7Yx4{_*umv{<80=~^)?_=en zWhf7=!TvtVVedm(#`{=#5fwxmuF5)?Fzqxou?T-*L^U6>{oP1+Iy82h{|KGqegs$M z+ubX0O!Ezd>IW|6PI9CF1KMO^S7NcD$mKRLpk;{wiVgOY$BV8JqWXHmQ5=0#L!(M_r>?VC*+OzAIZo6AqKdbrO} zemgD$few>J9v5qt)|4UGY%=+Rk~2{18LK3Q>Wmi}j9wt`wE`F`Vi&~(9~cbgF&7wf zPm1yQ*kCY^nZTGi&z6JhP<*nk1K+Q*23Tt@QpeUVa=YNl!YVZk#IGm`B1%4(b`FgB z9HxYmXUY{8CAzQrv*>ULj4qN|BF1!4iHo};0#4HCe1nn?@v`Q?&){ey_$V0PsHO1e zOHac=L(jphBym$%rS|n)LO!;T}^W1OA#Ea0I#T4 z%quDDR!h8UBPp#CgYDnh0Ue4ZHR?@%nHTXX@}_bPapy#kt2a}36pg>&1XGidCZ<>^e)_H-=R%t{szt&Sh@5dGxUu3|79%^f=~-erRbmrvIc zE;=0Mw73F^aIG7$EL`qtgo{uKS8^>D!w7e=8X4!B^63DA=sz0&$}!W0Xy z1y&ve?a!ASjRse1Z6w%kIDdJ~BHS;pS%mw=HM_->pIb9KC%0x+-hOM=W;lO&%_7_{ zuUUlq#Wm|N<>%JS&dIHrmAAi|!TM*OsO@n9uom2|#)I2koIYL>%v0lm8uq&V?50J| zR@sCxrHOgKP?<+nsCh(VO7LA%C=dBlK{f3d8uy)0XC1?K>4Z-x)I%tZxKZaPFtsyF zf0)r~_%SpERLA?$rkD}*^K6&8m+~(;8iDBNJ^QvSScEV9!@3kDkA;7#e0VD)dXT}^M*ZT_>N*x5Xll`)Js=_Kg*(jEg3 zR579+fMyvm7vt{Sa=eujHH-|@D=caqe8lJ$ivfgjZAg*3cvK{nP4!k~cqLJF7LM`Sj#6JtIU5sF6 znG{ob^j5AigRCr$@@J~=Kq)i0(vWd$;M}V?h(F?mpOg#2@DEU* z3O~}PVbD#VR!%p4MtMEyla*6paK7A(BHRP3c7%Um)xKQGVyazjgv+hAos(N_D{sHm z{=hiRFRylldtlX$@DHrohv!!NV5aJkjCb8@R~ZhI7H?#;8@KggpXhI4S0G!;s(*K zHSA9Szun%;#P(+K5ZLyB;?2abL<5JOgoI}gSg0K{f{P|bs19Pq;8s)8S1_slhWIX> zgoOKkR>6yLE|bP|OC(tfy%!j%XB;Zf>KsxG)1&`u`3c0M(vvy;HaFrt%$Z6z7oP6HeJ zh&HyBWNbSvd3UpckBs^Y+*P7rq}PUeJE6wobNV9GSdP02u~_IUg*$GovdQ2O(+2%v zDZ6}Z*~bk~+HMRFr>h9B!Wb%sLek!?iWmJRCPEIKgprbHCV5DkQ!*xS`h>$KmN=98 zmy`}3ljoF<>VK%)f8t;F_2GuNPqCsOTBptvJZ$95*T*XId&y%JItzCfk`yNCwGX5Z z-*mYS#6X)~cO2N^kH%|SaSAj*I7J9Vih6tPQ}rCg$~t7t7Yi+l`VMcOdbRk%nu)$6 z4#~P-h(a~EONmIojERp0wGzzQWUzfQ1hAqisTe3?lKulJWW95#M5M=~{3Kci%kMjRfjn$_6!Xa%6->}~Q$ zIF7S3sm>}QBL=H>g*qD5;{LGFqRlx9ud_f~Ote;-d1X{rIz;noF?}oUhe4~e*bm3k z1K9$Coiq7~zJ#-lB7X?mRk(pBcQ?>(X(j%7i&Afi z^AdjBSzUNMO{a;TPW*-rEBz*MN8q=u$JKbj8+5$!jR3c)Rf_A9cab^XmAozV+ty;o zGQ27e;H&bUrdwH`&O1NFMCYe2#03rF!ql}XFxQy--xUAsu0^~kk07X|o(VrF~^=aY0LO**huFxyI5UxaASoA19kP(j- zJx{6Ui=ODk;ylsonO;ag(+jPm)>NHf!!p?h!oej5^g-rkd~S@}UC+Y;E;}7$%VqF* zTXD*wuE8)`0~WZL9v2uP@lkMj5~<6RJLz+fTf>LwGml+)KcX)-ew4Q(g}fc92UAEr zm_nJ$Q07khd}C++4=DA6{7)!#Yr)<0)#BX+|Dr3R{#*#L7Arx5$I0&TNB0&iraMq7 zMP5T;hf{FVr~gwtbml1fSytKag4YacDzV0yjNdCy6(zF#QnX-#O;WHAe}j)4+7oQ@ z$PpmrG6S)apD3Iv8ywcw6G2ZfW>WCa{g?8kM3xwEh4<)wkNYMT3mkgIuVXafrj~+- zt_+-^911|v;134PXrLs~!5S!;GFqeS+UKTCk>NY4|BsSkm`)y{KO)rGxCr#bQKcjO z^;b(h;UiHV!ZZQSyulVzN5B#hpB<{Ku$b4cNnQ;>4DGk!wwSQkZ`;K2g|4tFlGxiy zC|QHcR^lZ?Dp~XiG)b-4zkh1EMaL)^5O)6QnCdX-6@_9gk9c#W>RNR{*@ zWp{&@j5|`x@P)e1C1k_48nr(wssA|gZxj)gsB7ow+9e1PzM>R7O*UIfSwm2;#-VbZ z!C@WSia{Kr8dMVw>9Ij0blHxY?@+CqKts?^23uvZZCLsVQqUV@$brjhMm&nRnngpk 
zE!3!b^itBAbgzxLM7Nvq>1@@Y+6@ND(_}*lX9oJvjE7w6KVLiMoh8tT{0piK3io&X z7?DUd57THJ4qQeop?Zw+KWloRUbHYq5KAz1cc%NniI;U%ye$6VmeTLwE;L!A?p*(a z!n!kwvHwpws){I;Pt`W;915pBb5xf&6kbP+`nsEs(v*`o4YbB!7bzECRH9sb$>MPl zfz{0v3k|VK53V=dq;7)?{}GS1y-WdfBLG z!M$6ccb~z zN5`9x+Qs0@X3`$VZYB*=^=Bq_{Zl2KinRqz4V?%busiGCZ)L;eFrpDeyDTnV?bB)4 zzghgsFDbseR51b;k#Y7btoC^$N!3bN4cnivMLp@NXyilx|M8U4h@zIJX1h;$|By5x z{{ms6-zxl?$35a(2|V5mJ_%ekd^*8&gA&EkhZJrwZdS(9VTD3VN*4?M;aLg2Sl}Y#HsffWAr73n7T-P&Brw2`FXjv`9Y6$Vw&{C;U z=tM(+*Sjq@%m!WW&Rk%O50nkct-3H`Yo~dw3^DnQN5Otrj})z^ zhS9L4A^RP+&h4pyo?u+49=du$=m{1h>RD{)aj=@v8heBuk214dHMzvikIi}Ot;uDm zQAS%$tzq~Y(=>H75v1uDhN;|1TJh+CnsII(X;t$a4GV+8ovkF}c?w0@frj9MN{%Cn zhv>=BHx;h!6YnuKo2r7#shAsyX(#1F2rIS>|D+HLl*@xCnpV$|Oqwd26PuCfCSP_A z!%=E1DxpVHB?g9qeGOaq?YjFq$3pC5E2vSd8B6f7$yf80WpXanH%;W^W8(0sad?ru z8eEs6YiiO|(;h+?lP_zkg&3x7!xqc1upD=gDp}txfFWn8#b!>tys07k8{YTFNmrEZ zl{sn1{x}uwN2XJi%9L3#uOrPe%RzoPzDqoo_cnf^czjpbrIv+@C9# z*fh|WEJ~C>G(DC8%)Lt`4PBtNMD$(?^rffru^Mq|mY0r0jdFxK*&Y;{HROI6=f)qa zHv$G3I4u$mFO?C6#xC$S($TUomc|yNP7Cw3rhyOzKGQI1iD5$sgV&`FHf2))gkPHG zZ8s;d;kdPbJY?;Lk6)&q5L+czYQ!VhvPA2LFYjH9(NF|-pjzs$=fH!Sc zX9+gD+v$|RcJ~h2JGsMs9I1$7F4Jn!cRrLppZMWO)+rm2*vPDq+Dyc1^XOh9Ngr?H zK+)$&xpVrW0%PHjYX+G$h<11ITfltZul$WaV1%D3f|N+@!~O_sL&NC8iY2(`3<5KJ zlCci{aO6;ihbJj$o~l={)10pw2=ajwSv9C=_>9UZL35xlL`m8@3ZGQD{n03hV05nf z?*A~=<4S_h3jg5XoF-(qU>?IrufbkZJbz+xlqvpWouMq0u;pJcY@U2kn^qIB>^flq zw6#DUHxrBQr8d0R{hn&n@6kOb@n%HH5f{EUn~nlcB?f4^MHkb)>%};PE7On|iS%w!^GkK04=w-&EMWC=GN^-3u_3oi(J<06MFBn&>kSKbNqEmXO(3 zMVTTv%jpDMh;jIJ3wC=^X0Koa6Nz4PFuM?mgw57DD9g)@`v#w_o+FqQ#(Uf*=!ir1 zpFQ*Bk-UT)?p3v@4?f%bOVz9v>_vvjwvqn^AE-mS!WjhX8dMFnRxjl?7E3Q8C12!T zqFU(^_nXvT=r71Gm-ND+(~C{$S7d@3D)&*tAqt6h?O7A!3>AF2Sg=xY1HUCEhrhHK z?rmKrhL7;pogur*X(oFx2nP>g0i^^a&Gv;_8vc$5#(~dV-79F9>k9W4Wm>#ZW1;d% z#?AD-|7M^{J7R2(Y0yK1pBT2Z|LCWIdgcZ6EjRw+`AO(5nMSFU&xTC-&l`H$E)xqO zx^(-L`I5;jN9K|h>^sBXD(KSrec~L1Y%u7$|SdbSP)~j0C<5Idq_yQ3g=i27or4 zBSGYU=4)r`wT~fdI#Z>Ue~Xgr-`Z4eA-o^@Yn0?OEm4wvW0WLX{TG~JJn#rqYy=y> z9-r|ITzJh`i5i6sGHns^k8;y5=`15cxTaGZP`8FP_@XJIR#CDlH%5)SH0bwzI4r=? z-&4Pe0rsnFu!9{@n@>kGMqD~CzL<9vSM#VroP#KvyYg_f%;MFuV_v2xY3CW5jgowO zfKG~J`xv-2xDpH)8b~CuB>iHF$&E-5Y}h2RBwrk%b0ZS^Kn`+WXR1X9uBO9MVwLX! ztBSDp-#p;CS%shUbE^1s5=WcxT@>EN9WL$m@Dl(zVG$sUu)Q4nSScdqlNCgOWYkgY9;z#)X(JKA%8#!X=XdsU!N;K)=#{+UaQ7*xUOA(bVB390xSAvYY8+ zmtkcFU=#3{Lxcfd^3TFZp-!n1pp_5?KEP5Taa2Gz9z1$Q`L+Pvh`X^DMWH5jupE<& zCsXlczLiXLI;Eq#oPzFE2xJy2iv3sMh$3A7jrs(rF_G{vvco8>0GS1&q{6r=Q5Hv( zlm$(xszjMXps6A3%m{-&RSTceT;U7pFW93281a{gK2hZJmdd~Z35i*J20zg;axh2g z42@Z#0lAoUIs=&Sg>5Q1K#K=?wrG@G+FNvnD#cNM!5*FAOhpZ*;%gdl5-yBHg~j4? zkvR7}HMF1S{x=Qn+#j8wutqY#-2p%_vkO!Y&h^Y4%gyvp19WA zuCBq@?)@IecEY=Q^z9aHDQv40Kd7++Kd8a9@B&P`!lKO$TFLd8(vO5cIPxbHVf@5p z4)6xsz1?)}#BT34`rsdc+wc?I0Q}DXy#z$eIiK1D{}1jmTm|G3Lm}36{sTB(#Ge_* z5QZY2xX!?>2*is(o*ab#( zEDU8)5xIF06$)Ri%F;LGm^AmiaJ$OZH|3ag%D-ug)HP2N4IE@oq2D2HE?N+r@WrXC8ii=rZ3)d}4A+ME^lz zIAU2FH2mavdh%p({2V8tkJ_Sv=sNto!Z;|ACklHz34M5$v4Cf%1xW04?|$)gzd=r7 z!3Nkm^9+~JD9yDy{f;41-JClSU8eEdI-0F_1vd5qvB^l^VuW3s?W zSO@xX(*I^i{!=4KC<+Qh353@-SE>I3%uZIx_0*3x5O0j&UIO9x_y#)NC z4<~VpTW@?+=_K{FCtYsRTrlD!`kHGnpnPBS=_dxkA6Qpvi1b>6?>I?)al}Xigj#nh zCc8!Q45REsVt&;6{YHqp@ngpL(S=ui3nw{cfy5bP0TZPb;j`Ye=pCn6kUoRO@QGq! 
zt=NqbL_AKtC;5S5v%^g`M*@s6>lJxoLWh-;tWY^aEMVe$20bG>uY?jK0M2Ee^nO~$1&}#ZVCN1+fnRFqSf4)iQiJ`DF8BsxA$aly%d?~Na5@YRG42CVtHR*)t z-(L(EDu$1S8|e{h-9KUBzF=^n6fEoa68-v#MW=&xRb(N`O1CS}NlE>+I_yh-m_6JA zCZew!wTUO9?>r6Ye)GYo0gX0mLs;=#<|Lx!@q`HSM8RM*sG8reNt!hcClM`HIHGX4 zIAxWSFbf1wpXlWz^}0ZRo6c-k04KyC(A#WHApL(jyBa9DiYi?_Q$0PIgiOzbgpfd( zgaiplJ4B2DSxq1b2|^M~*o}*4UwXRV%rxmgUw0=n$g=zhx~LfOpn{RjDj>qq46+J{ zun`3TMH18%MLmeSkh89uC}8}9{chF0^l^f!di3{t zUYwz?!&ZJL6}|R7PM43SaFY9PwZ0DrD8p$TesxOU)$<6TaHlVNs6Nbc_0l)0^laXU zN@pj|oQZb{DjnHoi~iYA$sSu$e%=Co65OoM?Z~rM`LYwVL;t4ly6Gw{TUCk4CU`8m z_Hgs4GN}zYIT*S;pW1`Lm+X%-It}V2sAS=$G|LrkJfou{^xzzr!U;k-p_pX*7E3u{ zkxGZ#wG`!l9x@wlp>j~TQAt6t9$Kt2FJ59D{cEZ83b&9uuJ?95(iJTY*<3?8&Yo;zQq54m_d<{9258MEPf zEIbtphp#}U>G>FToSYYaJNyn`VocL>b1J>n#mk5@BkuuT>dv?cuS$pW_yuMHuhQYX z67y}E_D0FW$eccVwf+I-s-2_fAnq6F+-1gzjUIRn8Kn)UjLY?7sTa{DhsYct-%qKG zoN#vi(HS{P492{nAeryhgCqL0J$rlfPiEfIpK-@&#b}lonivR&KjZ}E^oo`Gm+3#I zT{>`&FnSK+4gryKt|&M27DZ_n6h)^#JqI0cTNoYf9WFkj`*8y6ja}Dcjub^t`9#Wm z^qr#?`XUfb=%7l6Ck`z)ClN)FIY17>*uNVUo?hKQq!-_i)?e*?tyh0z+H-ySt{G3D zv51o7eEqN8|Iw{q#JLQZvN~q1@9KW0TfeTa2P~)bP_5*-%kWs3%)lG;oj7t%rK6|o zTyG|TTT4P_eKh6R2K2TYyY;ubf28&2dw<)DUmAV4Pk(yG*PvgNXa~Mi{yK=FXa--y z{u8qLH{+r3)0gWr->%YUMbevwP81FJJGKi|nwqBtm@-}Is1A1FCwup5{Zx8)Mn9N& zB@>Oz>XCgkTfV(Z@9zpJuOZ5xW%P@N`~yzbH2p-^0jQO_G)!QlB@Z8!4fQ< zLC-b_-H8jsO=hXIoast#qcC4xfqs(DTUh9l$LLQ^KU|pk3F`!KU(KfX;hxM(4o#=) zqQ$9tBUVU1oWii?ZX5}WV}S3``azsc)tQ_eS4G&nbp~_eDJSlYdRO-pGCuTPNnpO| zeZp{7&%z+0;^LR<<>yAy%IxHrg;DT1sYK#a)*#4n059#xtBO!V?#=4H+*D&5%) z(PUEprR%nCef7Yt1NvJ7|AUt%?{RVO)-PmV!wZe89I`+^l>IS~`{_2{R2J}^`7o%xl1eW3pj{ra}-gIT8~@)Gi- zbrz{Eb28APr+<^yhto&W`d0jKP^HIR+$KG-E3FTuznIbYPdhkGKihY3x_-3(r~OgU zA^FjI{>ge`!tBn#YZE61FKJz@i5gV#J?Z2ARFLvQov5eIZd_hIn@y@$`@=ZYsopC^y#akc4mnWiMGi6Q+^QeeQDI^99DS?48)jMQ7>qiRgi9_4J;X8k z><#*pU7tr1S2{eS&;L7>e#M2C<6O-T!A2o62gnid?7B*sORFV%>2iF4a1Z=91P6iK z1mrG9M#`wnp{p(19qCN>DfSU2f8ZJsHl(kiw@4BPi#+DyX5mPw{RwdzaW_HH@oPg# zL%FD&ii?@A7ccD`GmsdgVll5et>)oa`X`aocI1AHUiZD<>N4aK>kGlYC@4!6H{Y3Z zFQGruWx138rx2yp;n(97M*-+kGnGhP*lj6JV%)s|VMcnNo>Eyg^l1i2FfMwRMRIms zY=Y`hds79*!&Ma)Z?`}n{s80IcUXYmsj2-4rqr=& ziXL$F+MD_@?t~wT;Rk_}KJpR2A25A4@<;UhV)Xrt4;`?CB7ZjHkH_d204Mor?zk7R z{9(pduCxG;d)4_YN5$m-J>&Ad3rTmBObKk`BPhecO z%)XWJVaA8nTHx!9uLe%(J{wE-e8%U-(!EIJvmBnWsyySH&a;9~XZbrAmy@yzjPC+Y z>B>P^;{Usu-mUg9{X>k0Yd<#PmyEmR9j1ScE3K{F!Zjevu!M2hi~32Hvz+l= z>n(6J{GP52jLUa7H(OZ!1LN`*qmLWD2IFqghUu>XPIfpL(-UtBOgj^<>{tfu!Zo<8 z(5!@i&~*>vGL4#n{L}UCjJrOV=?^d-F7H_SgF-J0FcwySWISAru@U`gtB-7dy^ZOI z7?xi`569j#y4HmF6VZ}_c8uH z*5`i4Kc*5v+zsV&L0C#_5H|xODOH$zc79Hn%2e- zW-a{+rbjh2SKoAtpAo|!VEMx_{0YWSW?a&J!QdDN(twRF_P0L?ev5?#p9Mx5|Aen* zZTxAB%L}KsayzpcIOX^GSh^dSK71=>dsicUSfPt3_r~Hr($>)>Id0p zR}9ZG{=pc2JmXi!@Y5Kdh~aNzydJ~PXM8+{Z()2}46ic2HHKfo_@)^CQO4KB@LL#P z8N=@YPJShmK<+lxL%=)DcOGZ@LuYZp^6g2+hj_o1#Qu!&Z87}!z{&nu)`#6+y>9TM z)sZJfg1w&v#YoRRF*(bCr_?NUC`O;8|3!v=P+fjsyL<|D%C84{r3Tc?x6?hm0^nV$ zUw!wZAx2LVM*cBs-Ba!O4F*3}t>pTZ<>*@sK3Da!9fa!+##Iczm*otzoEgmUP2gmo z>?sz&n@Mv$Yv|{wbv#dDa`lX%$G;&#?EM#j2HPPMI-G!Gxw9kvT;Ky8n$O~CyGyqSQ1#Nepc*IFYjg`d&28~7so*!KU6339#;e5j4m2A)lze<4Bs zUlZtO;6+a-dmf*FpO%213A~e@n-l1Z3HSu?PWJhD0{xx@{HqE0BMJC{1pLiCi^M(ZawDM|CE6La{_*A0{(>r{665F%4=T&{r3{+ zpG%IeB|EyxvA!_NnWanElQ@^#-E$u~*`uhH27IXD{(98`hrfcR$%1rGDi#4xQEqIWVPn4P~)XGK2QK(fLt%~o2 zZ*JzBzE|-pjnZYlXz6>GHfmL`8B8j_TBsE{+-L@+>X?B`o{dmzD4x)Zx2*M6sLE!h zsaX+0ey3L(xzsN-Rmm#^J|$Kvs>=DBH*NsZs@C&`?W$n4C{;_%Qoc-z1o=|K_ngr@ zKL~1p2L+UWo>+1XVJa9YRqAC3#g4F!Tq2!q%ZYepzdF_&SEPvNM0!?{=49O$wxVBe zj(ZKi*#<};-zbzyUaeZ5jFLpqx}z+YYdgJazS>6UKu1>?_Y2#-LMbS;%K5-^I2pgt zj51q9%0^S~JRvI4QzH)BlsTbFm_(aB 
z6WpVfr#;lhkq?6WBpf7KF3xMB zU;`FMYw3^HNa?WlT08=jsztxrRI;?W z(%4r@%HLpHK--D5o6iziMMh8;gGuV`NSBFDl0E8xRHV(1cGxP~F1JOrr)evIO|v~_ z6gD5BUeZ)x%0;Zc3jtehZIBHwqss#+|n95Zr0R9$?wqz{lm3CmzL(MY@Fhe04j1`OdW}YC3 zZ($Ua5ByO@gI2$}^1KVypL4FaEVnH87Q@7?9zBm0;8($RW8BujLBLflimb3 z(o|ruP@z5vJ8pMYdUNg!wVs@b8yPc#mYh`l=6J10$@mimDn_!m^44#;5ESD0a2Im> zEn-nOq7o?$j6UG_uvI9}G%enad>NxM$u`Y6ylSiB0P3srg>e_A;hsr6DQPYMIC9ID zF4Uz_tx3nLlkQ$&B2QJp45*wH*jFf*X3juKng)Eu2uu_8&K-?9L^el>!cIc+L8&=O zLGnpcP(_~Nc$b;(k>cQaJ3;4G|o28!yzok`yc2YOmDrwxhy(HJg3-B1S^BLdneHOm_%gE(91j zPfZ~lljbku+r80zsq8F%1|`#0UT}A=F(xmKR^8fZXFf35baIhDlN*y0niobyp>TGP zHle4%9qTpd+^i9qHMMyvE+H8K+TJ0w&_%Ph|@ zoIDb28Z~BuZtwQkC*I%zm`iJ;I?!qs8@$}ybek^r3{pa$!zTA(Ha=2(2I)y*spGzPv z+9yFQl75zi;gb7yUca`el_Ae`zXX3c55(zzIj^tD{VpaJ{Usf_k0buI2>;3aa#8M^ zm_M#R!)hlUNVdqEV1BvN{y(}T{doSb;<$4Rl~Ny^Wgq0e3;RCkBK~9W7w7*FFtRI? zg^L(N0CACjMK94qumH=q3Im*wc6*5=rLZlQ!ny*X%uR zFX5N_b0EX3amO$F1CB7i&{=7r{|f#RA}IfozUC66rbr0EurleqO(A7VY^Vb-^+ZV9UG6)^BJAMh%K5U{9{bk=p z?>FqPIZ3h$oU^0r)Qbr^av*C-(v7kCCZ2@3Y9tScLEiJe0uS*k>tfq+sM~ z4=MG=1pXHux0G+fzLV$`=RYfff8ZHQdD(cB+uACMtY!YV{XTS-E%?7(5~U9LMURUT z_J3Hsgy{*ghO@YB-K#?lvlk{5Gv6Sd-0 w%m4fwOG(Z09{h)* z(l)K&Hq3|`5fvRr(NRQ4F<}chjsl|MG8#ak3(&zWEGqr|&b?LLMN8g!-}m|c|L6Ih z$Kt8J_x#Se=bn4+eyd2W$S)kP6GcJ2^ulN%ObLQbVGQ{ojM{|ZLM%AFg$yQZP!nGv z+|ZSSYABdFP1=kUwbBnREBg(FZ`nDmhT3#2zZyk8BAlJXaW&MYivnxSJ)FPh&zFZT zW{e=zlMr2sojODbvik@`_{im z&^7gI$K3x-ev_4ShwtKgBExj0+SO3a??$9kUBy%W*T0H8g(*sY?bSoQS`SL7WY2s_ zItZRHX*#*PiEPm;*;2fdLK8pVQgO}gr;5%E>(g)Ey1$NZIWcEg9f^`#r{N~OWWQAq zTnN}j_@3A9N>j)y2KC6W;}tkx!GJ5_Y3<101Ou)luWv`s=63KD6zEENHn)Sn*iOEH z1?v^cHLaa|r$hEi`F`Jy{)Tqs>)WxDD(Fi3e`?2`gYEcvTswM>w`0$ecJLYP*ml%UTv2G)J&||hwV<8yZf-}<=k55RV>>wYDQje=pucKIf3J4RmE4Z~<^HQ7PiqG+fc;k*H_kEn1fix3hT$3wnq9@k^A=TC71udRY8}PJ zLUF;gqT;z_wPo`v>KtXY(~8DbR#%lxE16SS#(7uf6qnYQ6qi?2l~h*TQ6|*RDV|?a zH@~>5Y)NrxNli%zr~|jS%DE^6l+=|uh^Sk-XlR|ImZEb@93{n$r8Q-9i>v1>C@Xao z-%(OoSzRjBI_E4ct}m}GV;RgVEnZYoyHHqEwrElHVnD=Q#3YMZHUMkOE321~&a$e~ z;(29`;)<%dW%UGguXXD@q(?igYpgrgUL(>HLMof`Ew88_T2aMZRayxPi>pc&l?g~zTRNX9LbSNf zIY;qfO^IVZQWi5=N>6O0FRQI$6uezAmtu=10xGK%ii=Ch)Z#hCiO3_jX!f0OGRBgqMMK#59oaN=b?7XhRSn-kySY2A} zL^+F#$%dMx{iG!LK@oH+TOVP~WI;LO_N`N%1v7mye>A)OD^5(&e zqY~AH%_fHp4SB1qzLd3HwWNzn;9TBNiB(%tQOD{Sg&jJlVjdYm4o1kt28)aXQL}vvf*1SfiTbtXv2u)|J&V&!Pt?DF9YhRIzMJYL_ze%4@3^6)!HS zbe7doW+AgGD?@H4vlR7+QX6CPLP1r9qqwG|7Wq)pvU&%SmpZCZ&XRJLNp0Ef&Wc(V zom+;UT~@rPdae_r0K_i?(D*T45L|i#>#rIK-c~~b=m6^Q!8)(f!8HkZo1orFAa6`e zb#+ypgSs#g^D3+7lvJW5wH0h!BFVW!F@zAaq^`80V)z^s4^uN!bhGWA**swuibm*XL49TMApH36k;^DadKT3$$ON!VBQkwSkg>mC$`oefPM-lr- zqWC3Zhk66n!s>qa!SxSgn;9kCqF|jWD&lDuJ6EBT7;O-$6xwlBMGWm~X&+2FoL!05 z2@Rm>QqL{a^MbRP9ABrLXQ;UAI*xCMz-^bsQHOd8q1F>3xrLoc8t|c$EDf(BM5Z_z?}B zp}|`H8_?uq06Pg zl_tV?Ycx3dLcP{$aI#6gHfZpWPr+=|;An`UYl{Yt4WWXtRfETA@a-BL9UycyY4C&) zDhT^DxTL`kXmGWU5&MV+*RJDyagIhHCRT|u?!Mkekz8bun1|OoqQ#AN+4W6pOZ5q702DfW)s|KH}!P7MO z3=Q5>gU{CBy)^iI4W6#SYczNt4PLLo`)Y8P23Lkc#$BVq`)TCYY4H9Ue1irbsKFaG z_#h3wMS~C4;9E8L5DmUvgJ)^*CJjDRgYVPe!!-B-4Sux-Kcc~D)>N-n4StOZA`EEo zYc=>;4L)3hpV#0y8eCAuJ!=0WG`LBFt3x)i%^F-8`dKQA1|Ox-W7XiJHF#eQK1PEN z(cm@>K3s$6YH*tdAFILb8ho4vpRB?2H24e+p0B}YYw+wzDn?icOuGbX#w`1%Sw-u5&(bio65_iocJmT@q<-!ef6W(>fu z)War_{Cwib$AZCN`8r0^#zsCArfEYX?+nwlk&#~x)3kw+H-%~1xXAa1Y1**JtHLxb zF67!UO&b(>PMD^Ri997t)5cF88>VT4A`cDIv@wx;hH2W6$Q{EpZA4^Un5GSg{Qad+ zd1>Pz9}Cm8;gAo7Y1(MWJHs?>FyxoRG;J*8O<|fg6!QIHnl=*hsxVC(2)Q;))5bxb z6Q*gyAWsR?v{8`9hH2U$$V0<4Z4Bg|VLFxQj$yhx(Yi3*gXr&n4VB+Y^sz8a8v*%H zn5GSYyfaMG!e4$lOw)p2-V~;3<1ODGrfI`1uL{$&5s+)cG;ILnIboU>{_>PCO$&Z` zY?!8nzC1Kc)5cxy8K!B$FLw;nw9uDzVVV~B^7p@l%1;Y>`B<2y1-*PIOw&SM-WjH8 
z0WZHCrfK0WZwk}2V3+R?)3i{RSA}U>pv$#knil5roG?uba(POariHjXHcZn3Tpk*x zY2hvR4AY~C?ii+Np)Kpe^cbSQ|2b5C8_~zYG%cv*Lt&a0((=wQO$%uGA*N!Hn_sm(!yo=xZYAK@oKC2)cU&-6eufh@hh)=u1m3x2G+F{x*U>5kY?u zL4OoMzZXI8ji7f#(Ay&D7b57VBk0E>=!YZddn4%85%ls1dT|6@6+xFr(6>dD=?jAvRiJ%iA=%@(#(&C8vkD$Mepie~5UqsL!MbPg>(0e23 z9TD`l2>OKx`soPzu?YI%2>RX#dUXW7Jc3>vL03i43TyHawdz%!5xUV3-otAwh z(9|3-c>j^>{n*_q<$HH}-}mMp&hs|qHlH#z2jX(w$E1w25cTFCSa}?wN%91+;>*wa zCO@k!KkKX9b1i~P5FF9&_zKu15)FXpTR`g=E$E0Y0Bz#5f#@92W?yzP0zzSsaFwzd{VpJcR&Xpz)^3fhX~SkDsq z50Ey}W)dj%>mXf7Pl%Zz&ro_8;rUGRJOVSov64y4d0em|UV=Cl*+{>Xq>($N7i^N} zIz>t_NFFQV`JS&F{TS0s;@6WnECGUAlRPsa!is5?JSNm~uKPliwE8ZrwYQ_tLe_q1 zllT3$UGO%&)7rKRS`UHIw#zJeNTRI;tVTpApMh<=tdz;u4rUT%VxUY?v9v~<902Z% z#I;dUxHgMvMPwp$oZCeukvvZ*aTDSVN?d?}cfdfBaRHyD#HoI4h?BP^Pd^@mah-_Y zLK#V(Ur2{ivB!9vHK09e9P(S#IP&qSah>F;LL9n*PUkU~$pp=eFWAVt6%4U5M8{Ix%n%!wVT~Z4Auf&>C>4LLRnSpBf{Bnc zCL@T{*{n>D#1Rv`!P4dwz?-v4!;8 z1V>}rZqf0GuXA6Bcn`Ywi{AKDiZ-7+xbmoA5CwP*n|J8e2~tBfqLh&0rHHrfwn+_t zAn>D=Cy+yw*H7gC%ZT8z_>18RX!vht(Yi?4i-1`ls`KgzosytOjE2clFk`5;Jl zd=f~j{5rAT18(qUU#f!ay&SMn;%^AJU6rtTvp*+bngW<0@ecy_pC|Q45GCi!4}rfO zW>O=jhCT+(%ym*HHN1m8^2+!PBnNBdzcca)A|WkzV&wfQcPZr(?`9-yZUTu$X0=X5 zBXgr!pe=dsL4QNkIH_h*C$AZ&UfAH~S&t=W+lhd9xb`=%Yy30Mro>%QVa<4RDyeMe(~8 z{HerOs!A{b7)!urRuR`C>Oun|+v=dR9aG5f!W;15EiFL_&f5Pe%TQNUGge8M%>27s!oV znSpY;8=$wqxKE z6*r%tGk{u**C0w&BL9m^v~h`EB=HWD=!7U{c8tl4G=iiSZEM?W zmKuCIs)3*83qmr9Y+xeCK*kd3W#k7SI}&*ZBX@&j$J`4+q6*&RIlcr~p249fIP@4n z`G7Db`lcVwZ4Pvb*_kVCYjOuf_xTv=@b4i|DM)U-M*__r6EyfHG?Bx3-BRhgLQ2~xH zQV)`~`-=vt;Z9O}(S!v7i(tj~Hzss!9v6}~GxB|qF{FYfz!qZ@NLuu`FJkI@iJL0j z{UIVP#x3AQljs5_`VhztOrpo)H21~9(%tl&8_N*%4$1QXk6WpCjbQS4X_h;`KS+0c z5lEA4{fUN~5s2fm%2A9Q4AKQ!&I0Lv4@2iwK;$H%;yWX1l`sBC@fgI-^4Ew{OR=!Y zsKqD%WnS6IMuYFl1%aLZ|BaEyK+-~&%{wrA@q6(6Vu$~%;o-ZuP_VCvsuxn6?lJ{ zn)Mkfh5M+$mzQp_`$itIyLX9!OHfvjGc(=1;v}-4wX1xcHgDCRq-d($`!jzIIwr6W zsqDTx@l15q`Tg=6WKgIMSf)57D=a-RmE?Wb=4GC+m$tEH6By+jk?YS(7on`c`=Nb< z-QVda6iTo+x9aWwtE27yyP{2j0DS7!i<7*S$4&I4m3d~?uD7XQ2L1xaY0A$FsaR*W z`;zC{eTEG3l--*j$ZE1TpX%a1w!(`?wij18vhsWx3+7-6owM6{4XQIR*WRB?<2Z)W zxObcF-Xq)>to;P~2ZBMQx|*kQypv-bh^S*Q8hs-oI6AcZi(22nIEnuQojsjF`KQO(T^8SEP zAsPoZR2OuXGK^J96mN3an)d>Yy0?4J1?(sSdbcxHQ?5eqF=l%9&(O~D9ttV`yqecx zf%hl7KYQb?@YXc7_5%a4Mk(}uTj1S|f&_}uFAIE*bR3-QjdmJghf|^o<1WmdHEY*8 zWD;0I`CkTy+Y4*o=q`OIQ#BjAZ-ph$k$Lb--pGs~SbapcPrHAD#g5}9E9xW$1A~Qz zu@1XJ{+o^gZ}$YjKEPQ)?d1SifFleX_TaS?=bqW#=ntf7OBv1mMf zA#3+8I}&JN1}-~7he4l0y4)^RwMboz(GZXl$C*xd3YAYuFFiEYN)0J=vVgwK{DmIC zb8zN+$E8GwV7v>b?{#{|WprAEFxMA&^wnAKM3Jy)1{DsT)8YP8RNMUD0*bAjKHE`n$G zjx!ZzerwPCKJd08auOmW`ZmjEoLPPX^R`+E_h2ZdN|4tA#3=B_EI}YOK3Fi$6!?ph z)~%*3qqLWgkLr^IG_t-l6Jcz+R=SUtY?!7BydMWnkP_dreNZZGOK!`XEMifX27bP8 zP_FwMQCgD%B5VKY?u9w$rG}4D{Cw~E+{~tY6uWTw9REZyFGpTcn(Ln#FC6^YUb?Hu zH+Qdz27hzGIYT;i7w2yBfCUPinAw)?EA(FU7VQr7M-TTE?Z#6J=yJY-M$os=+lmoF zJ+I>JAkV)rh%A;rTIgFuNq5YI4SC-D-864rdkp3yb*M=cycRqfnPwiYut zIk&@JIR|XCU(DmO9nt)(rmWB8V#G9)1^8@iyWIb9v)9?g>!6I)!Qns|)t+xz(`6N4 z2NUDQKA+&-<;~yjK8<=92@D4-r+X{od|}Qpso^B6kE6Mn2lGAq9aEMs$oDT1b925} zv0$t}ZkXNY+;{MTy|lT|SBX0Dwvcb;9z}-dusWqICYznF+5I(^f}F#Mzzn!4a6bx* zCr!}ke1X?WKvOH~{GRJC4kD-J4`bY_BV4lE^(~^oob`d_~)-noc2K)KRY3l84n#7pyqhs%)LzSHr7kI$VwFdAA?0 zp1>|r=yPsY&HsQZ-`gyYgdD4-4)7R*&Ic5uoJ+QOhy;ttd?61eai7BSe}dy#Ud^BcSOdl>X(Ci5i> zD&PjSVr)2Z3sRsX^tAg^k3gV&-Hs$u0*AZF(O%LD zFl!w_!#jXGykc<{=I^v;2xnniX0zRW?5sWKUF;%O zo}ulB-Mu#bJZ)0}I!~`W7DU{hdDxCqch4H8>aKLG5l)`9(?h^}nYNz`{bdLC>V+M& zUkQBr1uZd7V(}FCHWv%M+BKNjgyX^j{CQ4r5ZNpMekA#KNi{A$_ zpajj??)|jT`yCc6fz1ja?mGPtt!wS>!cON=FpV`0^adUDkh;@*0xzSp{4f0VBrD|q z6@RV!jlUkg++Wl#1D}45%AF2l6{o#Ho%}MVRYp1u?WG;ReFJ5e`K|I_9Dv{a?fGrN 
zWq!NkOZe^b`HrosXd}XxVAGG#N--NdfCAslbSrgDyANG7=fE->_7&JE4WLCbwkYGw zBCSG{agO#Uv`+I+Kk~W(MQ3e3uz=NU{t=8=r(ay>zD{)XFZ8E(Bt>t~jsXK);9(T2 z&|8XyDOC)Vk29bC6I++y5yVwDVk!;pu#z+XiQ=+X0t$UK)&j2~y~yiGxBehi>vN^V zD1;Nuh)gmU`btd`{Ug&0yvt32dX(7it25`eL_r7U@bN4Ytb{__8|-}gqYpUE1-|j- zLhlc<1wLc`^wpufNA1l=4FLgG7WkH1PJha`I!&N49)4Fq%hS->)O`j{x(Nf?;@|Yy zoR6eccc4*V5tjAY=|o;%yZHDvm+;HSw;LIBO3bA{vZ1`+6%sKNxc4aI8@9+D?AP6H zkyiDi6#neA403A=#=2Q*Kg(|n+>WCZjAFX!%_o$|U)hZm_fr)2##lWQ ziupLYBN`$!UVi|~pw|%A3)QEfIuHZBr2j@pWPQe+DYsmL`0HYCcIu4spEfJwG8Vr$ z6RHL2Ad&`-EVeGbd*l=}H}Xy@qxZ^x1E05l z?{}!9oBG3%+eB%HA*!LN{%UF4jfi*Z(r~E0Z*!|5<^TYl0wfc#Q`(lRSEQqwk3nJ= z&UR#-9j7eb8_cNO2hl{(Xagg$JlCwi*#4Dm^VsZe3cUF#m3Y(-JxQFV^H^4YErDL& zQwhGHHOcv3$k+x{gt9)s`V9+ZI?T!S9;QLXjuqV2JfCs-6sYlM-;Mph-FLG^JEvhS zJ;A&;|83NFJ(m3|&$>>5vgqLpeAfgA5eztbM3lsvdX> zDA;eSL5I^0S`YWljyPHT{T2+kvG3MCLLgw9a>ECS(C4-mW&WDq@Rj6=XC<_y zugTxh&?I?A>u`9w>}_mPPh;|D-a@0e1c zw`_y`Tz;cq#a6r{KWFmhZ(!~ER|EQ)uWUo1*V$O)y#zlOtA`63BvCI5*E1i~P$(sunZUxdSst zUM1b_K~upQ`WNX`flYY8tQ@1Sg9bFW<-IW0KY?@hbyTbJMKn@n9Fexw_saFH?h946 zHOqF`8iG=TjX7!A+l+{YW2md{Mp)oqj?K|2IuG3Z12xzDEwJZpI8)A|G^kIMRyMP- zp8Fq7;b+>jt+JqGSR2E4PF-LzS~zxG8Nx3D{zLf@B=y`11t?e)5?o{nL?p;{EsL6o z2L4$O#QCzc;dPiAo^LVS!B0ivkAaS;pd#!$X9yz9US( zMD*e?{RGkT!}NNhr-bR1>v-$g5BKlQ-yz6vG0*0yt?F6ex7+c@M!N!5-cW$#>((TV1dFvtQ}1I!pe7 zy8`yWwMKN#71G^4*o3$7*5qSua_;6{m0yJ*4E3C+cI4|kXcA2>-{J^2@Y!K*EBpTy zg+6B^EZKmHge52JnR^4>KEgA@Wjj!V>rjK-9oM1(kci!vIue=!AESk5?aw--jBiIk zQt@R8a=Caej5$YIy$Q*Q2ogaf!S*pQh9Nz$E~nFzJnvpup66}O^X_c^7H5BBn&mj( zsO^|_+$Y8~x0>8X$K>unyAfi}HlON9+SNW*?rPc@N0n zLSK>h*CHJEV5C6J@UB@To(V*uQsk-3q-9$ICe?z+kuCLdxy#Sw%qHFdkO^{$bMI?K zn*f?iehozBy7##d%UhGb7qK<@d*pTKwF;l7M7t#WnB=}DNbZ3KxdLJsb6)O-n*S@Z zmcPfqleebqZTWFT_<-~l+GxPshL!wvXkSzWYFYkKMZk-^%Q0EC1-|=9%UP)5d<>4# z-ZlGYALck;sm1P}AQtAdN^UQ#leSF|NA`1MjvVY$de~p1hImg_a!&IP>NM2PAOVMY(reR(vIi?Z_phWLQh=` zlkGY0*B++@`ngZxC>+=Bw7-FBrORd)cS>tEvJy|j`y}6x57CH};z(|qT%~P8a$O^@ zh8f>EZ-ZghLf`FF*t*58;me%2Q}OX+3l0b@cn^aeTl)LPS?O%c?z5p$UVVTLwf^}& zUpqgA5Mx0)M*o-4i)cCB^B}qk{h3bJg~6C^nRh}yl~&!!#?-0Ew-$|4@cn%NM00&b zDgK*9?*l|&J!HyxN4oD#(lNZJ_H7iJ6arXrFO_SpB9)dxjRBYQ!vR>Lrx&h zc|>fkOCuc9|u}bwIGy0Qmmt{up>M%2I zp!vnMEDc`HLzHRpjNp!>oN6D&{+haJ0`<$4-=S_Y@xLE0>ZNnZLnA64E(}>!iP8CV7UmuQOfMiGm5IpX2Eb z(!IZ;jpwdhA(m9u!Ki;z(|5NRuVB=1V!^1aLvjU53n{q!PBR9Ex;xmA;Tjo79m;t# zngd!?4P#a2y2i|MmXkC!*(?mzcbc#FUwt<48k#g~tym9Bo*UTYb=eA~R>=@eTt0_s zXCzv18n;i)xpuXh7u$#V+|*<-0_XbjQe1s-gZ}KnUD#H71F~1g zW4D`@s$yvTRGK!u&r^sK5Nh7EdUS6_8^ADxW<(`@NAl>YS6rBdS3Ox|faF<8%>*rx zwGe8Ej+c?N?yr|68wnvo|A@5C`%ga(VJn(bEyf65FFNVoG7#mgVkY6(o<YV!nWK>wwy-KMpYq7ErMQMw!3$t=dyk-t?C4}dRORsTn>3?~_)g zLPpwVmX~AQ!u2t^d69B!LVTrPC!=3K`;BJ4W$EaTJ*pEHtA%j>(zYq0oD4O}d`Ncp zML(ooK?SDCb(IIJRCT@0#*bTZ${ol>K6t*moV8F=k9d4~3bLYk5gn~ux`bySv)P1D zE`7x0iZXwsE=Ci;jsKv1;9k~EGH09NZA=fIzhEUCG3FRB4pNO{?<0GesW$YMv7-Ag z)TiKH4?Tt?J$9cRODQZ=GI6N(eu4Mhf`iyr1_OIrh>yv{dy?fi7B)?>6lS(!s@%y> zkf3EYv;?~Cry1*9;D>z(X(Km>oJYe(*v7%kMxHCm%ARy8{`pNk$r~=KZ(dKM@tRLS z#9JNfa4O?&{c>c!#&ZfOT@y0>7vDkyc(M?Hp`xV4f8U&Y6e^;+c6A+e%*#KoHoo!|Q^$j6e%JxkM-QFslcW zkvvZ#dio<;r(zwn?+5t(5O!O3??==d_CYNw9B0+bO#H;&pf9Vso$_If ztnP1zU>Ol3t)9sQXz{R^a?L}o?EIO${1EeU39`YQ1UE8TZbnyCa(oyMov9$Jpph;2 zwvj&+7s#`*FT%8Fp_7BJoJ-)VkLqRyudX|zG{It(6TV>j~gS{8JDw%f~|GeKJ+Wz`|cuBj-OP?Px1a@ z;4sKQkhKT0wg-tc0b~0q_WY&L|5QE7Pg^NC43Fz-XcqLoN}%?j6c7IbZM#W%OW-kh zm4s38kP3WBu-|h4#l*p5Q;X+8pxn!z4T#!(OioJlG!Ugc&h}5X*jqenfVd#r;&CB{ z9*kkC#_TX)M_|cndVM?r{m&bbIyQCTS>zAC@H`*gN2xy@WLLA3Rn%@1>a#naLxzB-#f+N zr+MDE!oG4h`Ki!*R#_y_+liRJu^Kyi4W<1ua8)z(w~Vu*1Q>4V+|uE-`|2%$$*2xG zbhjgv>(Efq-!T3=uHr458-{#W-4337htiTq&@l?P`t;Gj0Jxk^mi(ihqi*%h*YHAk 
zb`HJ2M9<9e+;ThKTq5t#Ga|erp{-WD3m53I3#w7AEk{_jA_3f)zdaBQoGMiDZA*SD zl|{;=9qfxTRlkAb@aA9KGT<%f_CY-u9f zzC^0!I5!H!Fz%P^zro>g;iqsOSaZmVDo8S1*}eHY$DtMjHv>jCtes|GPlD5tAsy7h zl+FbhXqY_u7v)m@)L?b$b2|z8>N~-sr;k1B5wEY3cOAvstOHf5-ISH zdYo~Ljg+uI7?j@F2VXfz^5u9}=v8oi-V$K@IS(Ax1l|$)(0uy*V zh2q!ocoM}2^LPh}_u_FK;;6GSQeo!M&)B{Kx}Kn4c{QHq&|rd25#%*?qj-1Jh}Zb; zwTM3XCPm+(X#HD3dUY5=PlL4kjWnX6-ZZRU0Ajhnkr)Pf41Um_hSvo8q{^+cM&TAlj)cdT2o8L4qqMpTUzBkO18o96BvC!Lj5WKt zWe)b;7i&mGsOnLPR%$Scs|Q)9%}0GLsi>^9vQKqbORB78)QCusylEXaXo%HWRasV7 zXDzFuPg~5jF0LrC&c1??Bdql3W;U7#3SEN_RFolQ1({_HRg<->uC%14%vy&}dBEf{ zYVBwY?9!yGbHa#u)M#r;DryxWwdv(f9am6*u2JQvt*+FlhROVcBCL_%e;meuMi6;WpZ5Kp!hy)@PR5knu5{lBHJuSre_?6pNiXi`=DP6mU2LFebAnNo6V^p*$MoHKqHZDFvYG2EL;;S?kj~F2RBs#$vN-+!eZp|&%|aZMvJfvZurN^x>A&8P9R1k_ z3h^|Zg;yz|MG5ha62tMbj}VWumxo`&_^P?@1V%LRDEfD@NL0Xy1iT;_Vc~2W-JpAcP|k-i0vkufgCMbhs3Rg?Iq+Cd%+2@Xrx$L3jq? zn+TtOI~Y8R@Lhxk$bXK|f^Yy%HEjrIBAkuzU4-=Tna*eFg{%~u37gOf*CMatq6bF8w`GqaN|D6*s9)ufldS8R^3?8IxLHI77ksU@@83+b{AwI$sl(!Rn zLzj5R6hW*vi{0X)P4q%UacD! zAC-5X{$9fke&gCGceKvc0p=FOPQx!mNY4S>)bH?O0lNtM3U?DQ97eM%9lz&szX>dt zkU~ZG*tPmRw_!fCjoY>NT~>Z-KVLzvH_7EIa=C8(3<&iFWCQ<4@XLrFveBuQ>~OTFkF~Pn*6Q5`-4qg>5UX26VLlR&{6@$JAYVoDH!68E`8&Ay zq+Mg*&E1O>_`5C;c^C4Q=lI`w5#plEZo_KbTC^#$GXP#3+H5)LQp+*cuP<6_a2uD9 zy0Nj-@oNRU6WZ-$$Yzo3^_nv8-No{vb}<|>m5>=tG9@H)eSFju#ulyyGz0v#lpjK# zUqO7-gnRXV!&;*|O1GOAU?v55u@#&mJJ&&eH~LO3><#&|i0WLa@~vT6>R%T0qet44 z-Al3su~gpyvIox?Z_~;$U+M>n+zRF=`pb#C(I@F|e}(eTBfG~l!<$JKHA=S9J7W9L zZ?9=bmSvyEW#>co^A_|?t?YR254$_+PEtD#^=^gU2O)bFmO{u=Fhg8W$h zMjelkFKKKW*%}PechSS?LjKX)SS5{TbxGsjS?oF1lTB({<~FebfjS(u(a#|_|5z|c z7@rCldp6XiPMGrwri;H|Y0ORI9jAx(FpfZ)({Dk=cM zSp5#M`rYBscZUB)Gee&3naJ#?w-mY*&KtBF2KYe`&6 zwt|^9CAhF;WS0f&Lb}xb*E&T|5o7C0;wxf;{ZXR$SrvuDX+4RH_I-3IB>TgnM9-!} zDO@VPRoVQhc5YQ1r|!{c9ZMITQPHK!;gw8Q9@Wp66ne6vFBrkcONLCoQE?bvj%0Mm z?_B-=wO{)qS5`H|%5j?#PFBL%N?4fAg>;K}nk*Aq3ZrliK-|2IlRSu^$YuF$>dL4L;laWK0550QOkgGElSQv>E zI+luePIaj4K9H+w&E%mvq;V?zy9MvK8wHUip4SAN^+_54s?YtKRtlQ9)TZpvoF7jTg{T#$Sh_csn z`lw1H!JCLDuZ6abU6Wjhr<}kK3Ue9-fMyFc_b?kbz{Vct=v^pD+V$I?wt zBfDtR4E&0w7$AC6G-eE)X%%WL&P1nfCSm%k2+YYi=|Jm9d=jaXi(*+miGNN8F(Yw0 zWg!e+3bR?Rx*G8f$eLQ{iG^tjj1^5kpxNk5UEyoJ=>*~i(>!Q0njT8Tq-CQ23>a;? z5l=!)rc0Q^VoaTpX$R9z1O9%p=>>Q+&U6@FjyFAnq9vHF2Ujw+!R|!UB4pdqbRQ&> zOdauT1mBr~^_@-k=;0(&F@C$6=0bBf(*dMPF}(>JQcY3N*45JI9pOIkKPn@ zh%EP^U~$4=`tJxvYD3Y@;z=kn{SAQCWP$xwQ$PILOb+OrZKC zgy2bfT%+sy5`G0k*9YNb!PNCVSSy&j(usy(?Rts=Ti5dxxYDB7R4oc=omfOCr1fMG zgOE0aMNC54XcjRGX@x9e1%mQGiI7&rA}%3qCW{DS+Cm=DrQN|JhBPman9?@zh&k;U z9LyO?vKjc;iD(C%#AMcTCRk#Ah$)7|O_gO;Ld?aEBq8L~;BzuDIx%T0 z2vbgpvtEelEJ~D#S*SM0TqVY=Mm&e5=_V?PN@|x1F==8)>bWMN#uD?5aSNozetQyC~C29gf;6KI{1EA%Pi`4mv}Gqm^?61 z?D__|UNrp?=2=ZI;@2i5e}PhV`+NH1kV+xZf+^5w@3zDA9f-v1kf7V1#G?p=!~y81 z-7Z8E!(`E+OLVh{-DV^G5X;lBs9Se2mh2sFq0Zl}zsP#OO_pCL*aPIY?AyHH~`JSv)Uk&{J)KMaqF9f%MtE1=u( zxEM#JH(@T)(SKV@AzpGTYAf{y%XDy3=&wN}2Oyg|*+OQg+(v*7ic@c7K=OH{OD$$o z+=Nc4_r=g>m=fI>tWwuSuSE(W@e4GR)b)%oBz8f&OT9mlq)b94Y1@aJ`B^6dSP~_&_35OkP+(2`h{o<` z;wnM(P5gn1v?cC75JM7Yphi-kje8Ts@Wco3oBCYbmk4Z$4s@T?zr@8MZ+qfHKwRIPfhv{@CQ zB?1=-7mHzBj+#>3SE?|1nkfjEO^8bb$cja-=f$d~+_-|lDt94q8H2SmMskrFaN-tF zMnxn{rJ=eC=fPuK_zn^wC+;B;_7mX zaMaMKj%ZA#LNtgKraz#KbV>ULvUoPx!?XvDwx5vP6KyKWVf_rL*?1E#CXAvqqf;Sf z`WBs5G#$eTi%$17Wa5qb?qDZ>f@BH(Ql=w*{W(aMATm)DlBGD{gA*1J{wnaLgiAuQ z6E-JgCwU0J1bjW0cT%HC$YHpcLSPS_VJr_mqj7x=WYZF8l#0s0VH zI5=ZC2iK_Jbkt@ar3fw+EJj9s6l?2M@L7xqeFpI&)TrRo7%BVMIXGVh-zUjg9GtC! 
z*^un3WHdtsXQO`lrg6#1DmWC=T3;n2y9$oQ7}k$9z!-g=3hLps{t9SQz{Pi?vUmMbJ~#|$M-jwJ?0eadzRqHNS3EhIkySBN+2J%GB6MC+K^`AbA& z^ura+an#xeKL{_f<}V(P(jk9SPn1D0EkRQdg#!BSs8tM@^T0`@xmc7_zJeAZkrsmD zsScY-IPn3>>NF$FH_@6;JQFtxb!HKgUxl>zjh`tC6-h)6>$k)4G5MTRESv5 zDU)S9WxG@~QS&fc^2okB#2yQw*J8Pas5?dG#1u;@733~Hfm_T2(D+`}r_ewdqaV`9(QCJ&K&BMY z0S#=E%Xhlz8)zJRUW^_Ev2`IHtr~@-A!s^121Zjuf0|hI=PiVpU%@`>px0DcA?Y0q z+G#&9v5!?$PYr=*ku)2I^t^@fzYp3mEJA%UDcv9R6*#yFoAsXEU{=!t*6@8=TY|Q1?V2>Ms8XY2{5ZhTmx-=9zCck6RvGV zFYDV?JWdEH6Ox|98{z1SHAV6N0_ z4ECZb`z>ZE`m225+T&;d(J$#o5yDMef-(+V$mC|Ia!ikqvcB4EuRM`WNxoPgo%C z56I?P1N>S15Rzt~#Si`q%ZV21S|cW4rzQD(oYw zu#j{V3y|2+tO2|fLerfnN4n3zOxzcOlhOgF-OA7}LTLIQFovcd)-gD!!o0dEGa-G1 z0Yyj@6Sjir24Sr)4$-2jilwEonAdrFAtbvj71;?>kqWb+M;LJmDf&&FA3{ShA#M>s zww_%b;xbqAhV&%*W!4fCxoxA$7n1IRTcY-{oL&y2ebE?(u4iU_tfGAABlq&=mtKZS zk6OYzHf_0hy)pl%e+x&X-@u*H0X<16*hLs0GoB|&BbZu~P&5S|nx#*mQL>2MdVW*S zi~b#k%1u0;^bLmY*a^&}TPY>=Sz*Ky^t)kwDhZ&4f{-4-RFw8C&tZ+4F&9p{0Hb23 zv&w#qSgc`;NQL4qM_&oaE?|nMK+mx3Jt`_B^+E~L%31!Og%T%qMDx6Q5kr4gWqJK_ zr;moyufB`%yP(;Jn>IX8teY5X0I_&8t%Z%*Z!>gk2p#bNvbko@R07xm!5!ssn8rUg zM%9#|(74#EO$fz}7vSRf<*>fh$c|%BN$kKqUI!;1!p|Iz(O-W%Pc9g?C~PWxR}98T z6{Ei?#NMH>$#*%7JuOCmYl!`+!X}5`##8I=f({53hlFHBi0W#Ynnk6`PB^cyse%pB z<=il;{udR6bzGrqq+;`=o{G@1%LP^_Dn;?e82wpt6)sVn0xwL;QIaHJI6)D0Os8%c z;*hl|7I0a^L`9YgHB6CL3bjtPdeR(4=oTh0O;y!6H=ONI!BC}0QA4lis$%qiRFxzw zQ`po!64ojN^TP&B!k5&9jiH3kXcKPJCfun>cuGz9NGRb!ZNgSI!Ej!qcdW{OR@G~Wp2r2Lj!RVbiy?MLg-r!r%GlFT1%e?%!O1@x)MVS$WC;Tm zc0Sk!n?`oGD!V%*YuC!&s*ydb%DxwpE!D~{*2t#N9)Wtm@sR9tt?YV@tX-A;Atd`J zt?cs}S%)fnF(mtnR<=bW+o;OwY8B@vyswo#rjey1N@lM)BzsCL`>RIwyegZn${J#n zQH0!|jA4nX>KkHr*08Tp*#koCG!1*Y${rkI57e+7Dtl;%ZPTzHP}$dn*i$v^msR%I z5W8H%KB%(uRdzzH!p=d#5|$~%KqA&C1Wj&2pZiRBZfIn?dv$ohEW$2vV*}P{o#{c@ zLE`|LEN%o|gu8jDASr(wZCI)jx?rS)8Ub z^<@K(y7fZRINIs1Ran_c3@H;~W4}?rDsfZM$8Fy;o#X|EXL)fatw zLy|6W;}TW(D_r+xppW5Zy0>xNdqFqhPOwn!xM;g_LvIZs^l9jbZ_9J z#D9ij-rr(m$5$u7IflD)PfVCwj9F~@h8HdomILKmj5fkL~7pEaI^AlHX5l>J_nlq&x~QFk{4~hgNo_A3F0K~&ny>If2GD!SdT_J z4VKYKcRjbPX$}`x^z5C^`gE^?co&g(aAQ-D3vz78MxJYzu>f(!c9@MJKw;w9 zGZo-Lqj6VHs7VIv3#j-McUC$Gi_(;bBOzVJjM?}IGPpnBmOcS7RFTkyV(9Gb5(`kw zp3r&%Vv-&?<2L>Pq7NdyaGOcNwTgmp466`!iQU9_mUs+gufyGi5+~S{1T^?W*);@p z#$}W;s*i$FGbU775vqrZ5SjcVdg@S={b8dqy%%095YdY!@3;vMwE$i)7RP6J)nX!7>L7D2jxt}G#U{E!^Js_7HFs#31#X~p)2J$8!8eNf`m%KLWY+V zT!K|ALbx(gBs3_Qvk`$E?^2Uy$8alzIBM>Fy5cBn5$}xZ%C?;6i(!JsA2v)ZG7hAU zH%pI0VBBn2Tx854jNYkm;by}`H6JEk2mLqPvDG!$0FP1h32z`G))|c-r=zmhg7pz( z4&l~q2Jt;2-xBc>h)xF7I&R&cKnz7>5D_auT#v|j+|nwHg{p2{D2C#&%k+@gVP`th z+z+wYkiQMLxlxnG5K5yYp*>ohZYxBpA-f1SwHw9yS!P}{;0lH$1*B&4OS;N{*ENG* zRGK10PULk;)k5uVSQ}PM6O8fV7_g$*q(8wpj7Ga?ns3Aj#$58O&}To!vu;Lq1k$Q# zWB=O(1#nl zZbsU1pvT~5V|xzAW`m~ZrSVNLdIZ-;55VehOHmljWW^vc6jLk{#2llwtPh$=7l<`L zp1!?crlDI+qYuT@G%+X8OlTN?0J7^zo`!K%J}MLo$zvF2L-;1hHsWSO_$!FKOo?d- zR}-5;F*R{$2;T+SW=c#$_=kvmfSWGEdrIl(WNK|6-q@DY_#zI$VlZl%V7*J|DI|QP zh|#f@;X6&5H??V^?-G^?DZNA?;esMUr(+4?Se_Xjpb3UQD=a$GI?Ge**W056!&3_5 zGjvpXw~X37RwIU26`W4VdMh~Xk+EPMAsDh1<&@LtaH@n++DxS}4s>K2H$x1?IE|e8 z7~@wWt!@&;zJhCx;TBk;#!wW;wOmxR5BQu~1GujT5bzig}4hAcYd04JA@zp+srM$N`V; z)EEX3c8PSg*Mgm%q>1gx%Fd`4)HfL%S7(%L41-d&#vu->@o`UR?lsE^( ztBCv!H=AtKJflO2G2F0AH&aX)Ou5r!vm5d|akI%rO%oGJqa+DWHXlItAa1012U&WQ zGU`GxjFjxs6^lu_Patv((yh3SUxWA#5&B>RF2hnA3k5n*UITlma;aZNRVzw~p@PCy zr5I|(SU&lSw9bj{MYCM2F@r|r4knDcV!V;P%QB$DhbXLQF?M(WS#-Fd5cFbVhciiG z4#xNlEGy0#;ze4LMAJ%IJZBh8>kgq$RY&}{Y(3i**G%W8vj0VjsJQWuIhdzN{=A-v zC&geDkDKiV&g%1=cx7@KjkC4P?Ks-sX#f9Ny{ zcl=G5r)&oHexM6jbUsKr_4+v70l+=uFc9GGc?}wy?q+VjPIwwJ(zKL~$hKoXIGqUS zasy&rZlRx2`dLIjOYnms{fXH;DLtgY74>ror>!U(joT}TjO~5{b`H03j!xJ{hAc32 
z`ukvHeLYw<7&08UZWM?bMCKDQ1jI%}=v9nPl*V{7kOQEaaF1I8;tK=@vs@Ut{%kIhzucOA&46hDZp(e4U>zxtzsyKvs`u= z?$e1(6aA&^dP+>1?nh)D5u}M;+j|_h`Cdm@lm54w?$?=huRw$zrN2Rmw}3c|$RQ#& zg7_Mdqqw7HDfXqPZUI}RTQjN zt70EcRl%vF*xG9K6|LG=t*C7s`b2H(v(?}CyY@aeH-Nsr|L1={pMCBg)>?b*wbx#I z?cttlif#lY-Q@-es&pq7EDssNDjFk4{HO(i)G-);_VWPy5)J()TmCc>Nw69T2=;?W z3`ArAg7@<^f@b{=nW!n`+z(CWhZT0<+sik+d@aP7X|Qr1Pe2XVrr?^V9?NqSw80^1EX4~N5mWnZyt7aHZ_F*5Ver}!jr zb^1n?#4Tl=Pxd+jNe{6SE>vfNCNFk)+iEn7zhGrQBv4i&JN zN(7PDIz-koL3wRPWFtZbaYSBMAaxmn$jhhc$c#*RO(sps>qfxbfS~2&WtrO9TC_?* z&2*cdHU%(tWb5KU>g6QCHr);M?;&`9TBBKyZIdk5p8?A5z@P_!p_l7NP%}TVkb%Rd z3Qk@x0F+Yok8~_=yBU~K9es=5$vmxC`4oi6WfqzMWg{CgE;Q@kaG)Cdl1|Qk$4V~ zrgH$NV#|8{B&t^6QB(dah~^Vh4s8ewF@uVD>F5D+ z*~5+HjWy+a7V0w5pk?J|xRzXi`lQzoT?}&=SZ@m1#h2+qF_YU&zHcT0Yb?r}rbJoW$k)Sx)ZdXW zif}e6o{r$nIh6=$Vy9y+<40LH5c$6XS$(G(e>SQaYoP@B2hp0JBl2$oS-X7X2Y`4Fk%OnGIpDXKzV<^vi8XKH8(0Cq zRRlSDYLHE)dK$@3PXMt3Uzt)G$*gHoN5~0wYB?A~8Mw*VSzZI0CP3Ibd!|hTS%Ftp z8?k}Lxjv0IJ9^Vd^E(DW-9FIY3=O}cTVsIk_JK0a??uUbCI$#12|2IzrlO~x@2xOc zRw|v-|8ZOoG7@FsWtQJk>ZQgi#CF-TPtVba6zLcQRKdXw7CE#j_wiO>wy|y$TbTw5 zIXCp?BXGK}5diA)f$~mh&4CkJ!);T3{8?(`zrgA%j@;>tSc6e06kObD+yYqrnWnBYTZEqu|>J3CF<2i|{IhCu=1@)tgTCqX4(3F2DjC zFb^Zxqmc;p#SIiB(gq=6r{HlOa>@{F2Z`?@auXAO1I>RTaugwhB;9!^zO^4#fe7w0 zBpyZN5hl(<;$=jBhLCVq1)5jpGt)t4b~$Oeb3yZS6lL{?sYkFsK%xK`xJ<*`UVqARtMF=2d>7=iW`q1LB>JBY41|Q=gs~(s=Y(;|oD_?4Y5bto@)6)v@oO{6+-9{yrjiF+uFd5qShbbj-(g zG`6o}FQEKaM4*oS7b0&kK^^-mBJU%3PnmkPj){M5p>GbL3yPN`hR-PdWrgRUZ|+Bd zI}|1fq3%|AMLoa3cxR=I2^>!KoB`hLSzCHltUpR1qdQVpMonCqEcLr@-iZDq*fymW`fvl zh_oPx6n*SCjqOWuJIc2bfl}Ow$Tdt*ink(i3xf9x!?{0Fxn&~*4Wbhz-BO{2Xj?yF z(;l@mZ)4M_qz6FYUQ*`S4?&q@ z9^*XV8(m%i=HU|aw;-SUcKlok_~RyD4w~%XYGY&qlTDAc1E$|x!Ii#5r8rvJeZ_Wf zpyj=4TFwKXXlFy7A3`g>Sf(qnt(V#8>zp)u&PtdSH0ezQdm<9aa7#P{dqhkkEei=d z5f7fCPhs&_pjVAZ6+#Bd^<9GEgmrklXZZB2zp$DM?ZnDLVBCSC%K?2Ug8gkIZbak; zgtRM>u(#lGH&Wk2$RMJ75IIjE^)Nyp(-7s4JCX{eZDu2mVm)q3c>`t4Nu^7TOo&^2 zVrlZq7>QZd@>Qr+d8qyzM2?|qzN91&Z-8i@uQMc0wBfl1hIXF)dP2F#e%a%u54akA zT?j3cPL$ErzZs96$aQZ) zE0Nd{bsYKP-K3ib*bT^ChT>9$3}(6=$f-kW4MN}TNc6p$j~nqAn%N8i8X5uX8PeE2 z?2UP-I1eRh!|<@G8}alja-uQAyOp$aP)ZSa-vp>FsNw990iF$fpK_v=Mc_Sy-XichC`nt2hn0V4O!X<&nFP8%G3_C;#!t!> zEx2wS?O{Kr3!x*=I)m-+0OSeOc!;$^i~nG(4DD7Gx*u8^aw%DIq< zI4+PtPTJV$d&&1f6`)Js~1x7_rX4^ojPGP3n)t`A!;tykymkPQk+|TY@+~)ASd7 zWA0?hg0V&~7=MM1d=4RVfgsGMnBGD9Erg_1K&#W_zeJk1XXZ@@f%rqGp{{j~%cZmx zs57WyKrH*%nN$43+pEJz9B8TRfuGj>2>_zCJSE2$VT zY|(1nAba;hz5KejNJA+6OcIrz+#R|Isc5i(dJ%dhQ7U5FsdFCWvf{225? 
zK3snFBNZ;cZUp=d#NqPGhxDQ`gJN;{^#z!e<=0(+zY{^0UymX3Lj+lV`2>O|5r{6o zo(1gBh|GH_zeD5|1hf3|(F3MhfBE%OBd${}zur6#R}_mhhO&lhF}eI@l>I*_`T*qL zMX*0aB6S2*3L%4FZXA{hW04w#;AS8(7m+zk3_+qCky3;N?)dr@1mrc>>Tc!!3ldD* ziX!_)JemNw7Qwy>iCu_XgOD+ewcLrEhmksn;68xFuMznL6Hg)WXGGpcfPa~T^qz51mvJi5rE91tB9FiKOd6X@Qf% zf}zM9h%la)^As>LXhh&K29Kt<8v9kTX_#)??wOyue+ROxU!=$&#z?| z^pV$H9s@LQX~}h$XA$|Cpj!cDaJk>V0sK%c>1Yo$`4^;NXD^yhjguvonr8)u#h^YA z1+hw3n|wKOvb;BqD)AHo)C1eBJeZy=-rZAE%~Z=95_+ngs^>rLEKJoqrExOVPIcDY ziDtz=L&WJ?xvZ?~!JitTl~!F&GHxHq(RV?`Ss39gpnss9g%QpwCPwjIyBg)l#rAnz zmBsUBgc_rD4bIJT^bB&GWD0rpRhG>u< zAbl5tY=2y7YF1X5HvV+SO-0SJaDNprzebRycfx3NJ%TK~a}gPdAa_0fW+((KKjv$6 zbP>M{u%$%iB7O}b=P|)Wd@CZ&2y)lM$M;UA@iqfA>n;u>fn#MIV|J^((X`LrUyEL6QRU`y9@{~pFD(0 zF+OitaLl8x*T0mY%>jH#0#>_aSl*f+(Vo@10D^SHwereT>Ld#ODxs zmI*52tB4#$@SZpP{4a{Q-+*C4G_j~6hMbJy5Wgtmdm#A^>!2e39g)wNpd!YPLuCk} zh{F)cW`c@13z4%Byu*g_Hz?u;1HyL*G_od0#x+lM@`Pi1T zEQjYbp*Bl>Y)_`Lh433`#OE;{{}uc86I+g;;?t9m_}uRECHp<=)nuf$*>W#Y9bMs@ z^KJHIS;5HyY@5B1Wa75r0oc`&kagNNyGa&pvNYRfUmyutnr*W$XCnS)E^RKdWq~yc z?Cr4SaBLLW+hGqD_KuN<9rjpByhroB!=5S$vE|*i7}fZ<6iP0(<;^prYP#wzb%`DS zBk&Z@-zTX(wk#9paU(vyIs-YE+v0y%^SC)Xs&=iV9=8YMdbc%p6#o6vQopoEFvS(I z-0=L1owkSM+T@xTW7k0du)1ETxaL^ zh;f}=`WDD*F|M;~B_U#5XLt07ah<)RM~v(2ojqb~(qbH|Ti0q@XJtXemMSxq^K@R@ zpfWp$RcISjW*10((o<#jqMj`JLZgt$Tr(VDmum_@d7c$ll(Qe=;0Tddh*(OrXW zbr1^2&!hSaMvJ@svWgsR4<~312Ej@MxldOFdGVtxKL*{Fj~Z`hT?ka!+`SR-Hz3I7 z?p=u7$pkldA3@|H1h3Liif-;+(P0R|%V9L|zG011RlNh%mm!`ckDf z19LAkZb8WAA#q%Hs(s)pfA>?^rZ8?I-=0R0NO1C3@Bf=n#?1xo4aU5)_ z#(y#ZBpy-X*2OQ4mqZF@x$*qH?PL12PQL@@ZxFUOcQjb)sD3Myg99$vT6smMQ^!NB z2;(E0YOU9F93+^j;Sz^Ya~SX2{QJ4l7x|6a#1h!k8(L`s-CFgxPpfr9A@J zRmfk7pifRNFq9P5MBw)4_!%B&%{BnEAjn*|9g(d}aIV{l$TbLZ?&4!Rf*p0)ZbkVm zMBq$$0Fir{AogKI9!HRK7a!Zz*#7MJ3zQ!r0%ynHA@T|noE`s!$h!#q%qUx=BQRO= z;nhOfuz6L<4wQrQCu_z|12xe=U33nhwq`&os&5$D8gjDt>mpg0``|&#kNJoS2(mE$ zI~e$k9C2aZCl9@ZAPe*Fo4SZnF4W zL|$fsEFMGTk4%uoPY~hlYhlrz2=*AfpPaxVAg#a|rh4w&q`08wD;kz-O8K5Ry@I0WgZj8XDGTYL*2G!Kn4B?na7!_cd&25Y zB`%zvI_^{pXz;aRzbn?T-=)N`#}|WuxBsQ5K7f#QU&RHF_eCq(~prJBN-TyIiT>X#&169-{#D+Fhg zfD;EUEmf$-bHO+^US3l;U-f&W53;SSkpUD=R(+XNR}s7fTSCa0${dY5Srzl8PjPRi zO3506X*yH&>kh~2w`ELI+WWBe`Npy@=<&L6Ex&47{k=~{$Nb4 zNFrw=R2EB8Ct~y?SczrmvWZJW*dIZ2R5e;H@2fmb4WEf(&BY+ig*AKLKv)BnQo3ZYbjg>}Bi6WiC{Z8luH=ccrtq;;rm;SrM^-e}$LFvg zM?X)`=M(XQtl9@4!24Lqi-l|eTm!`UQG55+Th_E0i6|e>==UI%+Y##Uuk_Y^g>M1o z$4EYbFpV!gy|4T}WPO0-n+QcstsAXFRO4}za7zSX(j+(ohh=3|jbs%H)F_A#Ds2X{Y0nK@hhPu`7|kgCzKyW}k#@-Axj$r){kR z3I2@v2LQPjLEb>&g9YR*8quFF{~YB%Mv!A7A0{qJ)aQ=JL_VQeK((Ah;k!n2f8bGisQA0THT#0Z(ieJSLjoSS?!Ti!FG&L6mx z{linbML;V?$asUj!b_5?k>abI6L?9|M+{gWqnsymdBo(&Tqj`K5#*&Ce!~KWJ^y5k zcLDf0)yY_3r;$@Wl&S?}F)an~0vh>=!xl_gOAIp0d&(*Tc&CgHL)xC-UEJj6)Q0KK z8hYP)^scEuSq^tgUd~*Iab+Jt$;DuN2ZH?s64xQJ3!(bAF*r^XJCl{*qiPe+BK~ne z-45`5#D5owgNXb9!TZ3}$2%`%ME}wTO|g!L27d_v1HUmvT*7Wf1wEMh z8WKe@z=)y@Ju0GjOkV)(xnq^cAMe%v9(H^q%}d!_vWy-=oFBE9>Lcc5qnMZH!0?j@ zVqP|idGV$K5JAk#MlmnrksO8~=H)V4jxMAc5X8J(E|%jlB!564nwKjG^chm`5s2pH zN&%Wa4XTVViMNIKiFxTnvW?k)1xjz(1b##g7a6sI zm%)aZ(ccJ=280+nu()UR`)IaRK(zA*E{3dS^lt*%280Y2$mqWksY?-L^!tbbYbDA# z`g4iN(SJK&_94jVf5Y^Tt9iBj(SN()_f(_b`^+FkN4Kc7f3+7~$I&>;P*CzpC$%s0 z2{OPDco3EU06|9J(}+BY;FTG8e+256GHeH5$rqUVWXw%Ba2#_Vf}}PZ^s}Q^|GZ8E zW5%4^JmzK;uLg2VkGq}DZgV;n-XYL%&eWJcSs-cKu<(@tsX(y*4zM;vnh-MJL|A>3P~7J#&BN5k$8o^~wVEaX*Gr zr>gZnIyFgmPIXRC=Ty(S4}jLV70>~7wua;@Lp9{tvRkdY@$3ZxxkI)0gDv>}8GLcK zGmihTkaXi%>HcL+z@y)sbYrQ+c^N9{#x+tKuVy9Pxb98#_1MuLGI6uHrj^L@aonm+ zx<(V?1vE|QnlcuS?)Ff)Gl_qMFnT9Ue9~?m-vI9}={g<%Gc+J+kHoFX&nPSoU2^p! 
z&Vcf$ zj?j>)XwhVWoryY0+J0ANi%1W#bS6!OAe+)p6GYG}%d#_?}K=Nn(7Wlnj@|Xr0KDL_dNx8yxXtB6 zDX!6-k+2)GN<>bUBxw1N7)_P65($(w3&SHJ!(j0D0U-g(Q1ZehSxzVP)qf0RWt}+) zju2jGz=7`YP&sZpx)3=7nG@=0uVo^K>)22Wqi2cJlywC}8Mw&c(dxye;p7B4BY5vl zE-N50;Z6{V`@4pnL(KToG62?Xr@)eACo79Tk`GL=6;sYKgEgfGYl_AiMX3g+8LSa}{ncrSHZ1_(Ef!)= zAuX?vva&t{&EVxqA4EUR@@gx1h2C9>8-a!fuhNN74gS#0*XSl~J^8nEvI=&Ki+{8bZg^ zodD$;L4zLCpw@`=EMy(QFK{i3OCDW%L=7ULvGI3iBI`G*KZ{XoA{rjLTS)POp9a2L zWl62|R_K1STQlM{h$3!;2#T(h2nuPj+HrS)3KH4GvPO(SE8{+sR=yYo{Y>>`sRsQ_ z4H3|8bq_G@gGxD8JBV&zJdH)6PDV#t=$?z!B^Yh?lYkj<4F-?BQL(%D`W@i{=~N$O zqZ&gLYeX8f*Dex3-jLFOMY>wjE>bfz;6F%lf&lVTlLnmN%W_K!N(W2n&S>di)nB+D zafG;WQhFd-8fX50Zrod#6zn(UFAW{uOp>u}zo|EnW-i6>upc20$D?JBh=q`GZ$G8S z{mi8f2FniG6>cRD--Lt+NTA}c)P79X-B6yyNk&n*rwmadokbO~W*%Z`D|zlFjYcF= zSu$mvmDrV)$Jlx+af6OES&197?qgoFmAFaATCBv)Sv;k+M&yB(vq0LspY?FSAU!}! z-0+#v^3>*wUeyme;}=y z&$61oQwz=jqzKmeyNncT<{L1n&iyQ#9WA>*McAA94veUC70c)jX}qg+htwZLR}7`G zjE{o3)0 zvX?djhj%t#0sL}U+0f_$Q{DEkc1pCHJc z%>dwSjpKc2gta=u??kLdWXY4j{(9Ar#;5~9ya_Yu8j6hFH4sI8Nr@df3z2CEvp&Lu zAO2Yelmf?0_(46+k>2FE#s`Z}IeN0ZBL*&jQkj!~Nc=fS-&Y8#Bd-EI^Qr8J|GhNd%%BGnIf{Nn~!!tV3iig4zD?(Z!`f zroa91s(_qw^K(9b!84~=^Ernz8$N39rfsO<2^3up)Qt%CQ6zqa$g@m5hs2)|c@H6h zryaG10(N6DeW+UqWo7x7=%Q80U`{kNK8Z|7a@ahfOaP%XE{clM-5Qi!29Rz9yBvwT5cw_>^O5*5B2OV?5XEgq&TB}$icoj~5>F!c=?hVEArgN< z?w=66PfSH$LRp}Od{HQNas=qq^Dhs@Zu`i6)J0Kd z{u|(ACYx$NDZd3uzef<+e2xe=1w}UL*dpqKAW~jqD8ROvSc>VhsFcqH>_`NmG!2m{ zOo)^bnU5e+_VMkLDMh6W*cC*klPqeqof9e;r2sb|T!F;->M$!IVOD~ZY+2dSruqW3tRYeZ zdVXl{!UQedGcpyqVH<}v>8JLGkb_LNklYK3bbs9&0d(({{kr+ zg+|2m`hifpH5U!B*yoGEpS2{%k`X$@BUf5a@lsi~k_?s7 z9+>8qQBD{NLzHh!mPLg@*yXx8uzIAKpVg9#5jte*j3KC|VyXZMM$9=P)ln^?BEVo8lTg#6Btw#fXsn{t z`lV=V@`>ZT&aXFCzay-0;1FzG)Ej<>xRg|H^U;v1h9{>dIH#X>`ig$3Dh1YdEi6BT z+h`0N2&@}{o>@OxdItY+CBsIl!tt-zxg^VK>8DlA2)NZ~o=LWuQqz;?i?oda!raKt zY^OodyBP;&g-16>IsChlyS=-5!nEXsi{GdT4EmLK)(ljcN-&zGhz# z)5%cx*uJ=M5G31c!o*Lqw3)!YQn8c}$TnjcP>J-WHgMaGy^@A-TDeFWIEbcqH9J&J zbZXO<7{0d;EmX4;^c--pir0|H6u-Edj zfsae^2=2r~@qcPUx8M=D6OZ_N7<+(^!+adUL-99b@vq|%d>aqV8+v*yW}!0z#D= zuiT-Q9Z7GLV5CS>F!IkH%tM>s$5)^#)fCQ zDV_=*p*V8DOG&^b%;S)@lYxf2WXf}Xicy9LOXC@Fp4eW=Z_ID#Xy<2^a??ZtGu_kh zt*R<>SR07$2;_K`1k)nH?w5#UKhi4Txd}q;)eT1p;e0=o?>gQ)$XCwKHu9Xf7 z#38yHf3@mnu7)%7c9QK;yIgbUE8wXZAR?rFD_(~vwp{F<6&=)kC zOabuG-hj5Z8U&S_>^YmdT?a4Qffg&T(+k!suX`8D`nZXn>L%HISN%!r>3mns$=1NmJ^gM_ABt_26*6@5Oo$uzG>zC-9hXOTqXolGTqzTQB@sT)bx})0&ZPYl4^*OgIQR5?oISS!~`ay6I)SfuS5tgs{g-V9AbCBV5bUPvT z)ALLYG^*fbA;F>(HgZ3 z-;Hfj6+mp~QfZQ%@6Jck?RLi#(R)r9d?yyXPQWkI;5jav(%a%h3=W>xHxPMsHrGhobC<~l$jhp&VBgDZ5b&w z{QkPp!z!E~`!rF}nKN}gd0>N-^JVU`8MmpY&iP7fD)3T+CGHsD=5@JaW`Iza3oIga z8zhe6LmPrQW)j29D#MiC#Xm6q1h!K0?Aws{Pk)7Yu^q6c~R%R)HntRsu9vlrZF7?55-2vmp-= z(P#F*BchO-;;b-+q(SG_t(AIUSD_m4zQ)g<2H)W0+x|*H5 z$Hu$>;$PFrzd>V8(aR#Eo?h;=$W1DC`&79}XQPgp>EbxTx4myW1onUHZ9S7fCMjK7 zU5i0+A}4rg0p=X~uE>Gg`b`>8qPzZjhki=$uE*662#1e+(s@}oFjfn8oumbj&T#<8 zD8aX(1UvOSo#}M?$`C9eT{;3m=eSjArktJIC0mr^cvLy6qRKHHr1wBM_G~};&B}4& ztSZXEN!0b0NGD25eH}kWUb>yxF)H&N_lT#Ckjy(AogxX5N51%yHfOd{(asw&;KhFZ z|1tYRw~}|k?L%K+nC%y_rMpQ^gY+njPCBRfK5lv*BoQ1Y)*{`T?}m1ZA|;lBj09;tO_q&V!D^b{Iz7BJ|OtTY|S8hlHRxAHoGYB+j|>23C2_ zz1urcOGXg>%R&*c3a6>YSotPj&qm9GAz zCcm)_O8L!AzQr`z)b(#R`C`shr)qKx%eu*Xz%b_9Pc8UV8HlC8+>4o7?7HW8`EIiF z)&&@Y@V3w`QqT=6y-DT{Txa5hDcuCY-Qv=Wzk{_fR`^_ZeFqun-($d?#{MtC`fOFh8}KtSWCD&~iR@Bumz#O98zN+}w+MbirjsDntm6Q{G2$t)KA6b~ zf%z-V>!<0KONN*uj73Rk-Z1r$B~hKtGW@e$@6iL0HMD<3>O8v*qacW_7w?^P+M-HUNe2kFDAl{hjh_iY zU-|#V6gfD6k-jqSZR)L*)Zk8b?`?X`&Z%Hg)B=lnU{)|)c_m&68OU(mc5g9J*h!W7 zd=lj1Kq6%LyXdH*H-y!s^O)rH^;gYRXo;R5`${J}f3Ss+wu@*x`y$pZ26I8=e>aS_ 
z^AOhauLu*bMwtK)g8#oU@n$R&KR*Q%L;g)B9wrl9nLi8%BR%iHZP1~SoLugt4^;vK zn&c)mh&Coc(4hy#hrzPX--7X+SG6NZ>)X%yK726%ilRP;(N0DE2JIK`AqSY4&F%y0 zC@c!R5_c3=4zGyy9_1Cwa$r;mU@k)zRLq;5j%81(JBWGMbg6_Cp`PzS4q#gQ7l??a zIzLt7-9a_EK98HtnHdg|wj}AYIRjiml%p8TgCa5EQ) z8XI53_3m)7ouc>0D&2k{ru7?x`>)m!jLUjVwM~%ItC-^)dIy+Q(=gY27h{A%Z-;Qx z!F3Qz@aVkfPQKmPI6%<#d%mF3!f1n9BNk&Nx<9PUGB=9!PS>I=|5?y)*RmeMU(j83?z9f+et# zN$g;*>*0Ttz)(6D_)sX=4diyTA_Ex8c@X{XVB!Z3B>TdJN^>dyvGpK)f_dsdwb<#*F-A65oa0+d+|!(E5g&$Jtb|ESywyvLE<2ZO zJ1TwX!byy{E}SUwjkpF>&hNFY@z+|X+RJiJ2fwnL)vf1nXESCQZy(2|Y}%Y@=5}q7 z=^2DL$AB?xw`{-7olSl*F_S}>YDsqcDQP?xox3rjAuy)$^P=M!w)A zi~KtO*eVlerxE)V7`xJSG$p%5#v$y*B|9IjKfrEF*_6ECrS1{f;lBvEpt(o z4+fl7N8DkkBBKP_u!oLDMD{p*&!~STc6#rZo$oBz>ahSCSc0rQ?m$FGoCk97 z+y^=>^~bevicsUJTw1g~#d*f9kBa4)3!-9yzCGh&T2A0N1Y^ZVu`YKeX;=9n@y3A; z1Jak>I=Qo+KnwE;H1%X47^ILht+U<}hs4qN-Vt^p&<3KO5Y7W+Hi@M@kZ8G`^-f{_ z5qE%-0guf&v$I}IDW+AU$=HOT<)-R=lejZwe!;|>r|p|oOK$df&gF9G1XiAh$>45R zDt33V9Lm!_kr|HxK$)daM)zjGc;wC4f9oLfub!--j8+0AvfF0bjL1m=Ty04i;?A zj+lvmmgaLDw}~3K6X1M)%5MB6HPuryOPqh$umM8yB9H{`Frk1ax2{Ox%M&TcdLQdT ztoKW(78qI|I5epJovunuzZY+*1e+K^FG2s4oC7l5z~@Vs1rSbc(^FAC2z!)Le#=Sa zvNrKw{pU=FoW*hKbdzVgLrdI#B`(}Sx(po6A)2sD+=Yk^$#)mR3!t&B!bF|vj&_m` zkUhXCuU|H!>ncTHsop0z_;nWn3u0VU$tO0r#V%xd=eQ$KkOkk6R(F~naDyOgY>0#L zMQ&<^b0X^0E^%fkG4gpKpOyP|34r0H= zP{t$&w|%tCBfIp}b4Ug}_3UAI*4l@?-d!vpPP&clC^+52O3)H+waAM<0-rr=5j(4h=ck?-OGTh1XyQrI1p z0($_eII=_wz^L6R#34SwM_`-&_e7H2g#jYl?i?fVP)PVa-CI!O{{vx=xd~QuMwC~l z>NXK?>SA|r9VjiPn}6+vqK1D9KmoY{etEAp_zm!T@Z|oK27HbNV6brZhkZ(h$r9ch zqT^jTpGKR|Yb2ghB;foSa6TvS{e!cI&ay;Exs3?51|);rh)XbcC}U*M6i+KA|2Sx$pq{?y#?iKaPruDnAk{ z>Tl8;(t9dU-=l;#85P_uDmZYFJK}5%rbROGjKr0a3v&!TU+BFx8(r)C67-__C%ZAY zFW@OfAHJ|dX` zGr!cGF&~cYeOvSM-5Fd4qj4`LB-^JmSptB>?NaCOD7nt31{1VOO<6EK6x3(|%pzw9Mu z=~onh+Dqux2c&~zIR>`k(+uHdF3>vR0b?bHg3#jju3wze0_sALR zh8ThT9-NBfl+Uxcv`#_LOIzF{0j?SC61U7d$fJ^>Xjqx?3gOk=0m<+PTWPsgYA;7F ze97H3+?;eiy-;>cyo%#)QeW@|+vc z6(`sPsp9}BaITUSNKkf;ykq)of?aczM)(CNTdKEBnETwpbk9jbTE>0(??!!ZRP)kS z+*|;I2e5Yx#%{xiPIZ2=RSvFwx;Q3-MRk6&)%1pu{K;K%oMvs0?`UudmW?H*Iwqv+ zxEy&-9Rn+U_VMx^ZY-!79KgXUZX}2jV{qdJ1NDYmhh3}EW#$CPgX!tq8&uAZxv_cb z+dS;WPV`}#b6FgLWuQ5~-qh(1zEHC7i{qt)6z7FaSjtH$7o53lKetcKUD3z!{xnEZ z9?-la57VUi<-thB+Wf`Jj?rJxq4e2=$(tqe%pRkaaIb*N1`RwW+}O@Q zo7fvgC|R^h6wh7k_Nzk@wyz(r>|TP)M>OE-a44`W+ zpNSdu}s}Do1(XsgI=sCXY66H9p z%1!$U_34|tFf^xe#Xl3;j5@@lV_$Vuy%XFQT+3Y74GwWL-zJc7&dxq$jz!|xSS*F0 zdhEB@A|49+MbL=YA})AL(P3&!Oc_hGW;2Ep5G>CSbJ|hI2FjDL@ciUf9~ti=i#jvA zO8B>RHYQq~jRtdH*kt3nvd&J!b@ZNWIGj2=y-G&DfrcH^*;%;z(gTfqzB>E#DlrZQ z8lIQVPFm#-BDCh3UBTP&Tu{5Pw*I#^lDexh59@x4HpxxVt)Vh#n+O7=9!={cZ_G z=|h+)VRDmr*WG{JgY!Z>@8x5VyiG%c9dfT5za3Sou_IjDS{Dh|c5a5wc63(Pt`D!R zZmDaEST$XZO?6?s*Qv3k!CJmzZdG_$OJkGO9>MQZH$>{JjqTNKEbgl947XG_N38am zFbdmRTk67{tyXiSqocY(a_g!)tHZ3IE{s>YL;!z5byHJoE$SojBD_wmxum>wskN@7 zwIv*>?hJ2S+t>+G)wL0;s09oHD!ig%et4Q@Wd7oCQDyPUg5}nNr7N-vr%%t$o0u~( zXMFa!=9boRt<8;{!PmI9*2b33NIMEUyILYM8zL={_QqNytJ`bW&fGL@ z%D5?$vd1-Kk1L&!J+7m(Ze}eAj;r@6oj{~sgfyA8adut1@>uc-%+{V_H zne|;wO~@=OF2u9CV{=RGTEMk-b&Tt3+1S`pH?Fg~rYX{a!mlN$3u>FHJJ!Zfw(7w6 zdMiA)wYA9#H#diyTQ@|St2@?*BW;aFK=TSJ3c~YOEG@Lq;*Q$twn&)&h*e&^WL|h_ z5hPvO(%4W~-4U7J+EiD;t`8TNw{^ADcCr`iBK3_e&*YSA{gzo4;^idO-&rT7(VS*O=C-S`)0}rN*&q6rge4JPwNp{ZEMSh zNPA~bhKLTj7p{l$iTFCg;1UFVAY$J)H0T)Q+u95*fOtEodNhn1VJO=p5E^9ET-}Km z)f+u3EL~EzxTw;SAq08R^ztQH9L)u7ZB3gCnwtDkJs+bPT5$gO@mdL}wvA9YbVo;L zhm=f^=R|o7B*C6#a?@D)=LDnJ{W z^CnCKd$e~NA{=RNhqc51_87a^V=C*LsvA12!nF|$Ju%fXOB(ZA-QHfk*@`sRU_y?~ z*JioCsZ}V3@z**%))k5q#vg~uj3D-*S$D6PVi?zUpl$S7UQ#-51x6NH%-#p!25W6& z!&;wBnOR%3QfU+nzj8EkaZv@P8O$m*P1We%u9l9*hL%WOcJ12gc57`lMm$VO2d6fl 
z6vuz9OdzH&^QX6cjEBFDxo33l}V2Tv}K_ za}25U5W zOgS?^M`Ij^a4#ux9B{rWuP7}e73~x@>e(a`I~%|=J-@559Rs09Sz%=x@G2F$J;m@q zBF*p{FyUi<>6<@1DcJ>jj7gi z7pvjeqoJpMQ~F}&IGAXWFqAr}pHQ!g;zH^{xXglY*#@~_et`Q*J=WeiTED5Tu>o&S zZbZ8x_2T@&i;0}CJwLiD;$3h$@ZDxw45+^NduUb}ePO>7v?Xrq!WfjX-w74nkT>Q0 zMq1hNd6rS!F!<3^$uudLZV6w~g=S1Gt|-S8SFl`Un9msm}3z3G~2f^ zJ_?5b+r6TYHs@q-LI!j3%5X7UuF7!5+{IQW98bE2t-4t-Wxkiy<4yQEdDO~zMX+;4 z7EDGXj3~TQ@lEiieDGNdwkKe=Z|*f(#p{n%cbOu~wVXOzTVX0T(ig6)X$qg;g*671 z9`CSihp%5;-qi-h=+%8t7F1400X{3nvsiT*P4kM%mKSL|Wpr}>;?e?R1>pcLE+{W2 zpT4)I-8U{MHqq~Hgh=%muW!=*5?!5P&f`Gpz`W!4Yj|UIdkcEiSO0|tC-$l`<*i9Z$UhPU89C?nxWYkgEI7&wjfrhU+r ziUK880yh^y-|FZ>z-zafBK5F^b?dq?32~YXuZ>{V zz>>PFsWZmyiVh%lL$$w5-`re-!WNPK@itEIqMJuvT`ocC}+2uAP-$KA(14XeN4P3m47Rn_%@iux7(dAxkJILq{uI zQ1rG|HE=kuYH7iesNK`{NLuX=YF}nC#s;Gft6QssCQIx^ulWLfPK!e|hVJEQ%P6xM z28Z5AKzGvv?pWK}-WfF-*|dvbpdI~b{6Knu;4$hjEGVjg=0a1s+0oht5@?ulkegfU zx|$+pv8;Dav;bk$v>L)rQORL6mKT*RE+`a_r585cf1zy#%<`fIMU`c&1pd-mj7Y4M zW2R7Jy(5wJ7--ZTt&MuQUuQM8z%ONs;5Kry=q&@?17T?iE!(<@IJP@vK*$j!)K|}nRHr#BSWq~qY znD$^URI;To(+WNku!Qzl|=iC8dBSe9WAqE?J2h9K7~;SJQ5xhskn zV__}ALj(PN3%LB)acIM|wpIv2C(H=*HC*=;pxDpk1wI?Pjv8La2^AgM34y|C5QmSZ z>g(sUvWn%bydG|eI6vj(MdBVBA0EBYjx|M0cNiVzBtn0K4p3)nCycbu4a7eIKw>z{ ziVBMh7IO;$+6)Da^?DoHv34N$rLxm!$-u4S&V=mQY-X8p)DRVe%jr^+&_*8Ba0vUu zkZl1{6E@iZ;$*_VN=VQweDbJsIMbeJwFsDx3$#JrTC%V^V(F%P}!l25o&iv&?Mc6J{QZ%m^D~!ce7{Cx$C&WXmss{l2K-Q(bv+cD!sJowW{LK^7rHX`M$iUXlWR3b+{P&V7<*UG+}Y^QaGP9 z3Q^|;Kue3^!qIVRz-IMY?XUEjtD=63*-(V)cVQwR9j@8Bj34B8_IykYy_PqSkD1|` znzXa0ePi~y@nE@eJ#hku6(ZB<`(R4onvKdIvm)`^xkC5DdNDOI>mnm#-$LkqGD=gr z3bN@@V6x8%IMUVyAMV6m25Wf*jmkV46~EH?#n9Q}Rd8inySN)E^)4?c#x`7!5z{6N zCcUw}gIy5SQyKxS?oc#YdTP3|sx}KsBPp>tB7mPn+oBdjM!f0RHw#Od zLc8%-S~SarOSpYkj6Ei-Xf{Ws#~nZHQ2D}?WbYlvyxv=m*u3fmJgIlG_pT$1crW1I z`;K|NHy-oM&SOP*NkK*7Lg^QnZrE5F&}@153nlS+Fdakp3CBSf>$Lb*SBRqZMq^%jj-~sG+CuSAEAkYKHZY1Q(T-%wb_}5$J}HH|)KbD!Co3-4QrZVrp4Z z-?$0msn^Cpxt!ZOY_MCm$P0zDJEAA2T6^&3OQ)s#;47T#VP=;&1g}iL!hJycBNC>se1K z2bZGG+$q3$4gGpL73I`AD6?MoWAiw=#sdQOARD@>+w0(C($)I9^-H)Df)@h*KC_6c z-K={c5^O0L4U;KKdPjF_D-?zj)Vmfvo3q+K(#EpaJS68m)FNmLryg#Rh$%8lQ>kYe z50&ca3l}adDqLhmmg<#45jR17R>GVMVUhG!l-{F+6Qd^|$f(zzmiUR%e_&Q0T#3i2 zoqYD8FJP`Lfy=ZuNm?jObHz?OjkUM&jj+P=i`ic-tu2w}Hf+ooOQ_G&=yd94)mJxB z4dLlQL$#3cCg0q(t<90#_O2G3JLYa|ZC{_;xe;3+t>f6TUhxf3+}8Ho8i=Qf7$}~Y zGa+YuZUfxHT$;{t6LN6WIUyGt6_S>SuF1;n>S)hxY^iPPs?*5hG_ufPm$YNm-5Uyr zpW!K!#=)WL+BB}Ar7IV^f*rj-V$!(~u%{__hyor9hd)3e@A>uq`L$qkV5!mV(sX5pz>vfcZ4D@(w`(~DyEo!VR z8|xb*b;$C?gs9m_K@4Z6Ee*AdXbi-(UNeIB;tL0zU<}>`;w`+x)QZit=2k>xKN8=l zM3y9oMo2%sQvo2|-G5InbD zqastoU1VMm7{GffR|GOL19zwu`*e~2?!bT*L{TuLi||@X%eym*6+ltI?ZHB!WnQos zu+mYOo~_CQ*&}+80TGBo^Z`Vlfq)n|w1p$j#&5weAb#P4UGyCn~=P*Mr*1Y0=sv0>w zISc<_!H*fJL(xworlWh5jW@yY4?uKbf)gCJFOW5qUniA$`@8Xf0~U%+>?DXle1}VP z`No@HA)*-C;4^Co2hIs~qIFqAA!_KD51jz8fEP_##QNHg_ht|O5M07d3PQU{J*tOo z;8FEUX8aN_w-rqfyr%xhj6bS>nT&ra{1l@saJ&7etsx$@UuMS3_S+`oZTm0G_>28+ zKp_4dUUn%oc$WeT4T0vR!sn%8A9#76VDD>Qknu0|u#Jp|H803`)c%SYU)fhV zq;QoZEXW`~H5W1BK;6rvZiraKunbk>*Nx?Ym$WospM-;D;NUanf2J8hzA&;(7$G*f zSOzZM4a69;6B+Q1MSJ3UR(-v~FQUr>4=G&lUKV&({hFy?tE0v=9#t?A%L4DIkD33m z`rPDyt`4%YgZ4w1HUbaXUoiEB{YO^)|4RE3Fe{2{|LX4QzCCyL8D;?m23dj&Bj7gf z7(~H!*d!`&Ap$|31LF)DG$3%MNntN9YMe)D#j-!xcdUAL=jZtuE7^#3^9t~ z{Z4INedl)a{l9eQ+2)Y8>eedHCiohEOn-@*7Y1-;3v2dE=a{_ErZ7okN= zyUnrMTky2usDOluVQ3M*n2Nipi>ce9{(!FFIm5=iM)c9V0BI>G5%(qMdNXIWf-Bsn zxzI={Xz-Z^n1{;qdU!J(Vua6^#wP`zRJU*@yM`Uy9p3pY)Sju-wI#uHcCZlstqwuL z^9>I}ZgCD<2O%=BNZAwG2pGb43^oD=#|y^|5D7(}ID*LAAH9@CJ0oduh4vSYAo=+N#&8K#QSI%Z^+t7IZrq#Im&%&JU&Y%HZ5mb_Y2Rm%Ts^#RP{U zf3KVhq)DS{!zKKO0gNS!5v26RJXZ=FDSjQl-$`JuJ1tZ_R2q1 
z!p+6s9f@2}dNbwmE`)-!z4B9}T$M=BY9x0>uQI#z?ov2gDtA_@E&^S_!-i!nMYfOX z1Mc4Pf0Rd&I>@-JON*aMuS!e}d*SZLu~4dgCHqQnzpvy#i8sq>g_OV1{P%C=vr)$4 zv*(T-qo`>LLp8gv$&EsRJ}dva zTz7n5nNZc>SXYjUt~VMJJgqut&U#QzmpS)2Lr8}He7zg-j z{sag01jljZ(fm&T0Uy)@{yKWlQMK}xk&y?#t8bvtH#nF)N{+zx&+x0^VI?~9tvv<)0PqD}=MD_`PBbPjm&%sv>EgJet4ez2~7QkaDM;&-R=?V-N;DW7>xq zE`8K21N-((Ok$Q%j)Xkxz2qSwek#<-V6q|>Xx?m6&Ipk1T_)@07z?J%!q2UxkClq~ zR9JG6ESD;@H7ZR*Bb_l)JV$p^+yNSw=No25fn7g*UU@Uf)@@0OkA8#HC`oWO4XtVB zcOHIUPjAhK^OyYV3gE0Q++GN0d*Mfg)H>fPT~D3&pF7NcIpa+fB)>nsIv>uf`Mb!uzVKzL?X{&hmce8(S7I?Qo1Ng(KcT;d458n(?Il7ax z0!;B*-n~|EiyPt>HxzS% zJ6!T&T1>ftGHBk9O!h~iBIe?Q$#5P>VLp=K>%%RftQO7vt>G4$JqRWGS9cCo$kNpB zsrlUIR!`#C42nJcgEyCnATV>f+oK zE;!pr7g;+T$xxm9`Nk(qH_*NeW2@!oG{vNQ(ebpgvKMe`6bzrAW>6{P;Z zUcr)cb!PUu5V_ZVku>Z@?|JUEESZHt|20}QdW~=4fVXgb9(yEy*3c-h#&3l_fvGk1 zIL)OWPkljZ`9 zqg?iZQujeC8qMc;cN8I=L!&vBEQ#PfQzUq*&G5f_c&bGg8{AFp^qz1t+9|&&J)e5v z>kF1pFZ^}EZRmxWi(bJBOkAmywCf$i!u2;xaOE89tj5myvQr+pzMt5FJw+r3?!y z^;pNCbbk{`H7=I$JTznsk z*c2cCefS1RS5o8r(1_=-AP0xI;SjT^d^xDuX+YTt-m#K*tmLIphSq1}S9`?v4@Ro8 zIxR+Ez5(mEN>O*!|Qpw$eEj7 zlm^(AelQK{!SqI=Hl|-7>V@>%piBxmi^JF3uj6-ZqYRUEmS-8CM$RB+@{F_BBf;~w2A%71olXVwV8wC=RO1#_S$3BH`m|<|f&%GVv18(l|KNh1j zUl6>F0RhO4U<>9wxOq9;jA0WuoAX}HgTB9+w+>?g=ymB$G%mlNZoyb!2Kb<}iH=q= z`qc^E)#+!cjN}!ZW>`qzf^WOEw4?uySlj$zUTQ z)^Mqckp^juLlG$_{DB5Iqe>5GNfvK`G)kdji2{oYMrhvNcI=YVDnMleM?eD2&ocS z%$KQ7Wf>;z@Y;;oy7bzDW`yS-dLs5|8WIys?S+y^E~Sy&#kAH3ce8ANB3r?H^>D%Q z>|4+A{nepV@wchBeze{cBk2j;LzXzc{@~#k>hrRxfohUo-J5=U*U@j?VESFLp(yeMWc*~#I$LT^9?Noe?z=jCM90HkRSk%x zhJYEGUfaa{MYk(+7j#sCIHNx5efbjnY4^AEbo7Gz5*)!86q&YI1fQO@y7OQGcKM5o zvnf;_Q>J2_ZH0HA=L|z>gxYsX&{lT3WTfBVG82m4!kl_0e*WgdtiUid;Bb!eJj`os zq_Nbu`9rs{kyq5?mlioSD-*1#ya*r$Y#+Hr$zsmWS(uJU@^*Q+&Unj$2(*m@&R}ny z=hvwf4ht8%YwGYEFZa~U(9iCwsAMLC#zFFiGg?U~{D9V`K5(8?TL+$WchFnx9qtDb z5WH5H9_SHz2tPzZlk#A-k=F?ou+Y83RL!%-hbBP(a2BIWpf;sL;1v^%+nS8VZG(UT zqqHH-xNQ(HsM*^aCNg~FdpN(&c~Whc*x}%d`BF)Oi0MgsT_U{X5^K=?jHp;f@WOK0 zr)#tv^-ouDoMFpHZu6_$mU_mK1WWX&pzT*@fmeuE_c?f`T0G(&d%@yz5P?WxJ-UmS zPXXz3!6>9jZf>(8Kbe~H$)iw96jZkVShA-Q!!6G3!cSD-i6%=^$ZxT8tMJ1|R<%!U z+Uo_Kr3+nX4Da10VBlyT=P60WHa{xd;O;B*jNla0nmQ$Tzw-G1yJvE_2C*`nlD!9T%G20_?!Ph1Tz3}eZ9Yx-K zyjeSpd1v@fE=3o3=~VRB$m^jeiS)3}=(N1RkPIXS2JL&47al;C@sTmbt|o~#iJe25 zq|kV)e}ZBb!YneFmFum3zy`~ta5kU%-}%lqY#rg-ob9S0w>w`E{*|-H1-{5#NzXAW z-QR$c0j*z4SOvSn^Fx;^|ad{6@ReR^Z-hM-k{=n!`Z47Xe*7r*dXG}BL$l0-bxB|tNUn9 z1&V~_R3IS})w9*XrvlMWyYrL+&2#Zu74M56HcNpnJd;vMt3jkd6DL9^CN|-2vC|e) zpqYj*u0Yq&h=4*C>yA{>kw8m-CSHZuulki#9wFK&PsC5kGx1mW5e$~@*ep5H2xa+i zl{%$65h-*>lS+3qNzBllh!nb`Nu@h&N^%#aJBE(*Kto7}(4DZ0)}6?w6LhCjmnh6S zQ=Eyq6KL}Rss}$CMR+#t>l}&Ie%eUPTbyM|iI!oDqo{^(kZSa_CjhD+kyPLjnf4Yo zi+ac4BClZgIr4_H(Ww8?mmtNLOc2p24k&ZPlZJQtJWSxuyToh4>Yre3w-qt?s}WiN z%_|o}oTW~9pwo1A2=CkrZKqdly2&+A?Qvi|t}fl=X$X&kq+!uzC8UCeoeJv9 zdh{x9Vmb)ix*lO&uU6jc82BA?T<`XNNB4*lqWQbE^5f1xlFk@h(8#2S%NfAILn@>6 zXlC+3vC2y$l{<1$d@3rF+JJN+(9275PAx)dL;|yC+M#?QH zKjW*Mb#jX~U*{e0fFHm|yzyZFEZ^B~Ginft5FHof41*jY0A0j7+PpA~zvPLKs5r3_ zjARDh*&;&#D%0KxdTPQeX-e>n`!bd&Bk*xr*;db$^(LI?A$mmj{`-RzO?P?}Ff zWPO|*5hsV@O?9|f<>jcfSFg(^3$JOD1t_L%4=Mgan;%IsxO+@1h5uaf^-p^j*m5nr z+Lp^yaX@aw54i?+k3*f(i9)NiB8*;5*yHn3v@fw4F#>QiQj<&%sk`@rZjA z5M5RrCD4+RQK;s*-h5JxKx=-K46)Cwq?$TIUfm7-7_p)iV|CRXn$ z;nW9nKfFPe6+YXE$3#;lwCxoi#MU)LBsRdsn_6Iy3{kr9iG(Pd zgeaSY3NBUCT^cy_6|7gYP0p~+!mCb#=AU$m?88qvP0UUtYw(0&6di+D>^VU)o^Uc4 z#d4kK1#MR9`^pbR5guCWHDjJudo1oxIGGBQu3+mxDW0>#5dC2*Ju{)t@*KerTLRDZ zv-lEkC3+SaO#Q~h4CRNtL=E^Q=Up0nhPLu|9n|z_J|7h%0U;vVU4y7f)G7dH9Vy8j z7tq7Ua_3=kZgyX!C5^4_PFgZikDReE`61Rxm-2qt)JY+S%aFrmNZQ8{zuLui0v;_+ 
z9YGO~xEPvwF=S*Q<34h(qu4VhcG(MRd6uLjDfp2mI2ZC$=Qwmf$AOp&rI-svQV7+# zAPK<_CfDM5wug8m&;x<->xCK5i4FLfg<}F^_&x_8<<`#*!J&o?n??D+3HaIPV2Vh6 zU|J;8VmS!iW&Jq(jGN##ViPGbV*Sc-(-spWIrY@nIy@(yDhMxAv8q)A8$rOlb__NG z1~(O6r6qt2Y?9W~!mesPp))bZNq9R!53}A-jqaqnGp&=O|1#mQYDJy?dnyu|tH(&O zw@3R{z!x23M5;wCQF^GQ`k%@>&JINzIdnbgO!yy~sFR0KregdVn`(TNMO zFEWb0ef)=^F&=p=DgLpS<94od6E)8dNCZ7?$jG@RA(ns|3RrU(CNM`{Mz@z;4&4+o zekPJZm=;1Zf>EYBqu0J+S2M+cUz;f!Ouu=barkM%Q|oB{N9QIv(-4D8#8ymf7e@?^ z7|bj=dG5>-e$p9;>5sIWJF^6;>G>7|wtMnE=+c4XyU~$kgNKFT|EOfCreq@sm~O{l zBO-g@l`7N6fkb1mNl_^CO`#lw#&S?^-B>gu@=2}Z6vfsN%OEjQ_ccOT-;>Fq$-+Q1 zhJhndjUz9XihuDqAmb(qLNc)+VlPA7Wus6aqlhE+AYgGFi_YoT3!z>Yl2EAm#S?%` zpddprWVxuv!6pS=AadY2G`-Bi-MUsj@Lc?ohq$azy%4I`;YC+KwP&(}!8W36t1_HsuLoK)vIdMIb z7_2a<>qXi(Vg!C8K2k;bX4Yne>=7ff#0hEtK^!A-?HRMh(T;9f)&5VZ6XM!4<`Rda zusWmIj>_^24Jo#d2jN;(3`F8clp%v=d+{`Q)07srv+@q&bjrb^$VGaQ;)tUXbz4a@ zmxG8yo%oh)!MIc94W^B#qmDfxtf!dB9xm13@P1G|IiVuPV1nTnr?tC*zgsVa?DZ4L z|C2sKRh9J14Y$xAkyH^+rzjyon_=n=LA~YNT?(KWHyCevlNR*CPp>1~mdX5xJ}!3t zpug4vN)a{8UTac~2Q~tRR67P6u@bLS5DXY1Z#nCN^mREJrFryuGPmaq2QvIBAXlwa z{m4pZCk=@^olSB!qr}9d4CUvILUoLq=w6O~fQ34SO>Xf0t60ozr9h_8gG)>TO1+si zt-Q1gmOMsw$K5#>2;hPl8kq64Bao+^yIh4O%0enjyU2W;a{-78W=H}uK#_!~l28v; zD(GnJIg{vj#?1~Kl~{{~wlkmeEp@2>gdC`{&P}k+WF{?Anm$6O{fNs1aM?CTr#x36 zBrPpZ=<_2k0|9oKOk%dAV{v{sUrX;2qQ@7@yIdWuD(@6DhXEb3I-*WNbpsV75*_R` z2_7KQH{kwQ`fPtB2fkKvqOdcOnmNw>lp4&=_bZw6FcT*FZ;C!z&bnb@NnB-KSR;CN z1Bj9gBU0=nFr;9|z+eE8cZOB%bb3s=;w)d4o>BpE?-xg>0O9>{EwO zud1h_R;Z2$)w2(N`qa5N*&+mp+B(|^rh{IkJ;X8;BD8R~xf&k$m6c)npyeS1JpzQI zl^lBOX_FMprooNw!sfO~;#AWA3B1M{RQ)* z+0`MKLNPy@>M)*H#QaCi)WG9q-T1IkSx`ro(gXF>lo=;kN)H>oEYN?~3 z)%tKhUbpa0hcG|tit%2ngjdusf7xk5%TQ%k@Eh^AlKIgDyCd&1j?b9McRNd58!yI$ zz8%OgM`!*nH4n#@2cGWA?{nUUL>q(^sP^D}SfXIQo;q_@#w$BAzpr}k+et6})2? ze(qg}qS16hoDZ1D8+SYW6Ff2T1=t$Q{N2X45kLam-L92{SInTwo{RohIklFqwh z79G)LygM47O&@(UKWz~6_0AS_RHUhSg>lHwbKFWO=Je z7;34_{iDx6@munFTmFvxcAo2fzLz}j&+_zmD$V;M{C(~tE|&9mbAFvKm>&uFo5AfN zzd6qY)4M}C>%(ayD)WDBHlPMPuYebKV18$;Nzuhwcd=!kKZ`Rk%pYUmllj2m`1r#j zk1xWRHog=ah51)}zR#}<-VFFnsii4wpZxC>|9$EYDgJiq(-eOU=K|2o)sr1|THiT{ZxT48N{;RWX08YFicaqjUcsJA7Bko>IQQ z^v*KArfhQ=e;Y^eaRC3f6*zZ)XQeoGFV5UE-#QY{uP=SD6o=uLbl~q)epShQ>%=<$ zN6GD_IAuN?l^7lR?u;Yf&zCd*M{^9^hXH!#M+d)8!s_XIM;y!+$Fi9poq_JguUUr1 z8Uc9{h>og$=kbj=o5Ek_Z*blgJRk7=!5t~SExjY3|1@kh=N;rFAn;GM{@Sp1KA8|04^(HT4bqW z=&JDbDk|cm>v;SVzuwdJq(B*?=1=%+_424f;Zxu`tb(dXOfaL zMd9O=oPG+Qsc`*JuY$3Kz$ss|+a#k-|8j-T$ig2`cw-j+yuv4E;m}zje_R&+fx_#v z@UIj;I16`C9+b|XW#JtZzB~&*MB!~&cyHiT{`wt96XK)mB*kCXCP?$Sz(I;%zn)yF z_%Bqr-bOoC;nS6z-dS>HD|~AfzCg=AL&jUH=AsoeConh7!(>n-?8*nk-dQ0X~F z@$2o%b;|pEh3n(T9aGX{ypYq8^=*^1ZeRbU+J$1U++kIN(Ec4 zaQ)$$C7J3V$dI{{T3Zr~Y~&wG+B9TnJo$ zp0P#3Y`?FPO5jv4 zdsTnFLq%}7;ICwZRr^)!SSlrcjIK2rk?zspFN;HhIz0g&nShT^z^5nR*C*gh6YvKU z@ZTihn}OHl$k$s5{9h*E-zVTa$X#y533zP+etZIcRsudU0iT?JUzLC_NWgDRz*i;U z&nDn+06!#0`M;mQ|6Ky^pF+}Am=N<`op9K8G1bk2eetrV}^8~yh0iTh8FG#?bCE)ia;7Dk7dXP54Cat*}JD##UViqadL$K)E9;}7Y(Fs}RKx}KB^2`0whKb0d zw&g>1rSQ>xVJJsisw0|AUHlRjvZh+rJF;x<5u5y8e`Jr0Fll2%gwsY}BZjr7B4aEjW8uhH+p6u$8mL-hhK%BN+=ZGc6Ku9A zVS$suP-zvT_R{92@epSzNe=6+?F|q`N$Ce$1j-0lZU6R0SYuX8k}VL*WRVRRZEVjN zSkfjLnqg}fhET}1k+H%pf=wI`>owY-r6q_A)ctZY?2Jl_vvCt8!p2o(Lng~Ckg`pq z%-Ly5o43nlx#p6l>CMw&@CDXFCQLN8Bd0gOlnG3Zz}Q`8!Zd9$egX{Cz%=p|Ghkqf zEJ(p_DykqS2dwB!C0ntu5e>UBu&T-?HIYf#30E{Ukoi4}5>{YhtQirXT*fM#7J)UzRCd zK-67CCYAt1RqtJ!%UyB!H9y?KNg~7I1rWU>R!0u(k|O>2;4AuPh8=;Rbh6j^^^DjoSEE8$w(l|b3%DaVW&TSKFX zR#mexV219oaT@FfPM=2EinRaI3H{NV5JppvEDOF`m1N77^E=VVjV&$N1hvgRNU;oE z%I0rU)K8Rzw(2>=GU&}}kpzsaaHls(ce1{l3Io=)FzHjPO`5Y>0|_g+eb5mK25OkB 
zcEVLtDL=wRzbJfQg}IrGciKb-elSEgo*30#Ba9^1iV>q0x3#~R29j=?VKSyx81}9e z<5g|bG*Wz4J8`la|0Yj>G)+kvav)?PguWN%kPs;Zhy;#rAp2BO*3V%T786ln78{so z4&$4~PyGePZbe=?brOY>UO)eN)5I$^CCr&B9~7JoQ%f`Sv`a37O=%;r3B8-3)g#6K zk8=v>$?k=

literal 0
HcmV?d00001

diff --git a/vendor/bundle/ruby/3.2.0/extensions/x86_64-linux-gnu/3.2.0/json-2.13.2/mkmf.log b/vendor/bundle/ruby/3.2.0/extensions/x86_64-linux-gnu/3.2.0/json-2.13.2/mkmf.log
new file mode 100644
index 000000000..76065c05c
--- /dev/null
+++ b/vendor/bundle/ruby/3.2.0/extensions/x86_64-linux-gnu/3.2.0/json-2.13.2/mkmf.log
@@ -0,0 +1,165 @@
+have_func: checking for rb_enc_interned_str() in ruby/encoding.h... -------------------- yes
+
+LD_LIBRARY_PATH=.:/usr/lib/x86_64-linux-gnu "x86_64-linux-gnu-gcc -o conftest -I/usr/include/x86_64-linux-gnu/ruby-3.2.0 -I/usr/include/ruby-3.2.0/ruby/backward -I/usr/include/ruby-3.2.0 -I. -Wdate-time -D_FORTIFY_SOURCE=3 -g -O2 -fno-omit-frame-pointer -mno-omit-leaf-frame-pointer -ffile-prefix-map=BUILDDIR=. -fstack-protector-strong -fstack-clash-protection -Wformat -Werror=format-security -fcf-protection -fdebug-prefix-map=BUILDDIR=/usr/src/ruby3.2-3.2.3-1ubuntu0.24.04.6 -fPIC conftest.c -L. -L/usr/lib/x86_64-linux-gnu -L. -Wl,-Bsymbolic-functions -Wl,-z,relro -Wl,-z,now -fstack-protector-strong -rdynamic -Wl,-export-dynamic -Wl,--no-as-needed -lruby-3.2 -lm -lpthread -lc"
+checked program was:
+/* begin */
+1: #include "ruby.h"
+2:
+3: int main(int argc, char **argv)
+4: {
+5: return !!argv[argc];
+6: }
+/* end */
+
+LD_LIBRARY_PATH=.:/usr/lib/x86_64-linux-gnu "x86_64-linux-gnu-gcc -o conftest -I/usr/include/x86_64-linux-gnu/ruby-3.2.0 -I/usr/include/ruby-3.2.0/ruby/backward -I/usr/include/ruby-3.2.0 -I. -Wdate-time -D_FORTIFY_SOURCE=3 -g -O2 -fno-omit-frame-pointer -mno-omit-leaf-frame-pointer -ffile-prefix-map=BUILDDIR=. -fstack-protector-strong -fstack-clash-protection -Wformat -Werror=format-security -fcf-protection -fdebug-prefix-map=BUILDDIR=/usr/src/ruby3.2-3.2.3-1ubuntu0.24.04.6 -fPIC conftest.c -L. -L/usr/lib/x86_64-linux-gnu -L. -Wl,-Bsymbolic-functions -Wl,-z,relro -Wl,-z,now -fstack-protector-strong -rdynamic -Wl,-export-dynamic -Wl,--no-as-needed -lruby-3.2 -lm -lpthread -lc"
+checked program was:
+/* begin */
+ 1: #include "ruby.h"
+ 2:
+ 3: #include
+ 4:
+ 5: /*top*/
+ 6: extern int t(void);
+ 7: int main(int argc, char **argv)
+ 8: {
+ 9: if (argc > 1000000) {
+10: int (* volatile tp)(void)=(int (*)(void))&t;
+11: printf("%d", (*tp)());
+12: }
+13:
+14: return !!argv[argc];
+15: }
+16: int t(void) { void ((*volatile p)()); p = (void ((*)()))rb_enc_interned_str; return !p; }
+/* end */
+
+--------------------
+
+have_func: checking for rb_hash_new_capa() in ruby.h... -------------------- yes
+
+LD_LIBRARY_PATH=.:/usr/lib/x86_64-linux-gnu "x86_64-linux-gnu-gcc -o conftest -I/usr/include/x86_64-linux-gnu/ruby-3.2.0 -I/usr/include/ruby-3.2.0/ruby/backward -I/usr/include/ruby-3.2.0 -I. -Wdate-time -D_FORTIFY_SOURCE=3 -g -O2 -fno-omit-frame-pointer -mno-omit-leaf-frame-pointer -ffile-prefix-map=BUILDDIR=. -fstack-protector-strong -fstack-clash-protection -Wformat -Werror=format-security -fcf-protection -fdebug-prefix-map=BUILDDIR=/usr/src/ruby3.2-3.2.3-1ubuntu0.24.04.6 -fPIC conftest.c -L. -L/usr/lib/x86_64-linux-gnu -L. -Wl,-Bsymbolic-functions -Wl,-z,relro -Wl,-z,now -fstack-protector-strong -rdynamic -Wl,-export-dynamic -Wl,--no-as-needed -lruby-3.2 -lm -lpthread -lc"
+checked program was:
+/* begin */
+ 1: #include "ruby.h"
+ 2:
+ 3: #include
+ 4:
+ 5: /*top*/
+ 6: extern int t(void);
+ 7: int main(int argc, char **argv)
+ 8: {
+ 9: if (argc > 1000000) {
+10: int (* volatile tp)(void)=(int (*)(void))&t;
+11: printf("%d", (*tp)());
+12: }
+13:
+14: return !!argv[argc];
+15: }
+16: int t(void) { void ((*volatile p)()); p = (void ((*)()))rb_hash_new_capa; return !p; }
+/* end */
+
+--------------------
+
+have_func: checking for rb_hash_bulk_insert() in ruby.h... -------------------- yes
+
+LD_LIBRARY_PATH=.:/usr/lib/x86_64-linux-gnu "x86_64-linux-gnu-gcc -o conftest -I/usr/include/x86_64-linux-gnu/ruby-3.2.0 -I/usr/include/ruby-3.2.0/ruby/backward -I/usr/include/ruby-3.2.0 -I. -Wdate-time -D_FORTIFY_SOURCE=3 -g -O2 -fno-omit-frame-pointer -mno-omit-leaf-frame-pointer -ffile-prefix-map=BUILDDIR=. -fstack-protector-strong -fstack-clash-protection -Wformat -Werror=format-security -fcf-protection -fdebug-prefix-map=BUILDDIR=/usr/src/ruby3.2-3.2.3-1ubuntu0.24.04.6 -fPIC conftest.c -L. -L/usr/lib/x86_64-linux-gnu -L. -Wl,-Bsymbolic-functions -Wl,-z,relro -Wl,-z,now -fstack-protector-strong -rdynamic -Wl,-export-dynamic -Wl,--no-as-needed -lruby-3.2 -lm -lpthread -lc"
+checked program was:
+/* begin */
+ 1: #include "ruby.h"
+ 2:
+ 3: #include
+ 4:
+ 5: /*top*/
+ 6: extern int t(void);
+ 7: int main(int argc, char **argv)
+ 8: {
+ 9: if (argc > 1000000) {
+10: int (* volatile tp)(void)=(int (*)(void))&t;
+11: printf("%d", (*tp)());
+12: }
+13:
+14: return !!argv[argc];
+15: }
+16: int t(void) { void ((*volatile p)()); p = (void ((*)()))rb_hash_bulk_insert; return !p; }
+/* end */
+
+--------------------
+
+have_func: checking for strnlen() in string.h... -------------------- yes
+
+LD_LIBRARY_PATH=.:/usr/lib/x86_64-linux-gnu "x86_64-linux-gnu-gcc -o conftest -I/usr/include/x86_64-linux-gnu/ruby-3.2.0 -I/usr/include/ruby-3.2.0/ruby/backward -I/usr/include/ruby-3.2.0 -I. -Wdate-time -D_FORTIFY_SOURCE=3 -g -O2 -fno-omit-frame-pointer -mno-omit-leaf-frame-pointer -ffile-prefix-map=BUILDDIR=. -fstack-protector-strong -fstack-clash-protection -Wformat -Werror=format-security -fcf-protection -fdebug-prefix-map=BUILDDIR=/usr/src/ruby3.2-3.2.3-1ubuntu0.24.04.6 -fPIC conftest.c -L. -L/usr/lib/x86_64-linux-gnu -L. -Wl,-Bsymbolic-functions -Wl,-z,relro -Wl,-z,now -fstack-protector-strong -rdynamic -Wl,-export-dynamic -Wl,--no-as-needed -lruby-3.2 -lm -lpthread -lc"
+checked program was:
+/* begin */
+ 1: #include "ruby.h"
+ 2:
+ 3: #include
+ 4:
+ 5: /*top*/
+ 6: extern int t(void);
+ 7: int main(int argc, char **argv)
+ 8: {
+ 9: if (argc > 1000000) {
+10: int (* volatile tp)(void)=(int (*)(void))&t;
+11: printf("%d", (*tp)());
+12: }
+13:
+14: return !!argv[argc];
+15: }
+16: int t(void) { void ((*volatile p)()); p = (void ((*)()))strnlen; return !p; }
+/* end */
+
+--------------------
+
+block in append_cflags: checking for whether -std=c99 is accepted as CFLAGS... -------------------- yes
+
+LD_LIBRARY_PATH=.:/usr/lib/x86_64-linux-gnu "x86_64-linux-gnu-gcc -I/usr/include/x86_64-linux-gnu/ruby-3.2.0 -I/usr/include/ruby-3.2.0/ruby/backward -I/usr/include/ruby-3.2.0 -I. -Wdate-time -D_FORTIFY_SOURCE=3 -g -O2 -fno-omit-frame-pointer -mno-omit-leaf-frame-pointer -ffile-prefix-map=BUILDDIR=. -fstack-protector-strong -fstack-clash-protection -Wformat -Werror=format-security -fcf-protection -fdebug-prefix-map=BUILDDIR=/usr/src/ruby3.2-3.2.3-1ubuntu0.24.04.6 -fPIC -std=c99 -Werror -c conftest.c"
+checked program was:
+/* begin */
+1: #include "ruby.h"
+2:
+3: int main(int argc, char **argv)
+4: {
+5: return !!argv[argc];
+6: }
+/* end */
+
+--------------------
+
+have_header: checking for x86intrin.h... -------------------- yes
+
+LD_LIBRARY_PATH=.:/usr/lib/x86_64-linux-gnu "x86_64-linux-gnu-gcc -I/usr/include/x86_64-linux-gnu/ruby-3.2.0 -I/usr/include/ruby-3.2.0/ruby/backward -I/usr/include/ruby-3.2.0 -I. -Wdate-time -D_FORTIFY_SOURCE=3 -g -O2 -fno-omit-frame-pointer -mno-omit-leaf-frame-pointer -ffile-prefix-map=BUILDDIR=. -fstack-protector-strong -fstack-clash-protection -Wformat -Werror=format-security -fcf-protection -fdebug-prefix-map=BUILDDIR=/usr/src/ruby3.2-3.2.3-1ubuntu0.24.04.6 -fPIC -std=c99 -c conftest.c"
+checked program was:
+/* begin */
+1: #include "ruby.h"
+2:
+3: #include
+/* end */
+
+--------------------
+
+LD_LIBRARY_PATH=.:/usr/lib/x86_64-linux-gnu "x86_64-linux-gnu-gcc -I/usr/include/x86_64-linux-gnu/ruby-3.2.0 -I/usr/include/ruby-3.2.0/ruby/backward -I/usr/include/ruby-3.2.0 -I. -Wdate-time -D_FORTIFY_SOURCE=3 -g -O2 -fno-omit-frame-pointer -mno-omit-leaf-frame-pointer -ffile-prefix-map=BUILDDIR=. -fstack-protector-strong -fstack-clash-protection -Wformat -Werror=format-security -fcf-protection -fdebug-prefix-map=BUILDDIR=/usr/src/ruby3.2-3.2.3-1ubuntu0.24.04.6 -fPIC -std=c99 -Werror=implicit-function-declaration -c conftest.c"
+checked program was:
+/* begin */
+ 1: #include "ruby.h"
+ 2:
+ 3: #include
+ 4:
+ 5: int main(int argc, char **argv) {
+ 6: __m128i test = _mm_set1_epi8(32);
+ 7: if (__builtin_cpu_supports("sse2")) { printf("OK"); }
+ 8: if (argc > 100000) printf("%p", &test);
+ 9: return 0;
+10: }
+/* end */
+
+have_header: checking for cpuid.h... -------------------- yes
+
+LD_LIBRARY_PATH=.:/usr/lib/x86_64-linux-gnu "x86_64-linux-gnu-gcc -I/usr/include/x86_64-linux-gnu/ruby-3.2.0 -I/usr/include/ruby-3.2.0/ruby/backward -I/usr/include/ruby-3.2.0 -I. -Wdate-time -D_FORTIFY_SOURCE=3 -g -O2 -fno-omit-frame-pointer -mno-omit-leaf-frame-pointer -ffile-prefix-map=BUILDDIR=.
-fstack-protector-strong -fstack-clash-protection -Wformat -Werror=format-security -fcf-protection -fdebug-prefix-map=BUILDDIR=/usr/src/ruby3.2-3.2.3-1ubuntu0.24.04.6 -fPIC -std=c99 -c conftest.c" +checked program was: +/* begin */ +1: #include "ruby.h" +2: +3: #include +/* end */ + +-------------------- + diff --git a/vendor/bundle/ruby/3.2.0/extensions/x86_64-linux-gnu/3.2.0/racc-1.8.1/gem.build_complete b/vendor/bundle/ruby/3.2.0/extensions/x86_64-linux-gnu/3.2.0/racc-1.8.1/gem.build_complete new file mode 100644 index 000000000..e69de29bb diff --git a/vendor/bundle/ruby/3.2.0/extensions/x86_64-linux-gnu/3.2.0/racc-1.8.1/gem_make.out b/vendor/bundle/ruby/3.2.0/extensions/x86_64-linux-gnu/3.2.0/racc-1.8.1/gem_make.out new file mode 100644 index 000000000..13cf6977b --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/extensions/x86_64-linux-gnu/3.2.0/racc-1.8.1/gem_make.out @@ -0,0 +1,18 @@ +current directory: /home/runner/work/twilio-ruby/twilio-ruby/vendor/bundle/ruby/3.2.0/gems/racc-1.8.1/ext/racc/cparse +/usr/bin/ruby3.2 -I/usr/lib/ruby/vendor_ruby extconf.rb +creating Makefile + +current directory: /home/runner/work/twilio-ruby/twilio-ruby/vendor/bundle/ruby/3.2.0/gems/racc-1.8.1/ext/racc/cparse +make DESTDIR\= sitearchdir\=./.gem.20250912-3428-h9k7jl sitelibdir\=./.gem.20250912-3428-h9k7jl clean + +current directory: /home/runner/work/twilio-ruby/twilio-ruby/vendor/bundle/ruby/3.2.0/gems/racc-1.8.1/ext/racc/cparse +make DESTDIR\= sitearchdir\=./.gem.20250912-3428-h9k7jl sitelibdir\=./.gem.20250912-3428-h9k7jl +compiling cparse.c +linking shared-object racc/cparse.so + +current directory: /home/runner/work/twilio-ruby/twilio-ruby/vendor/bundle/ruby/3.2.0/gems/racc-1.8.1/ext/racc/cparse +make DESTDIR\= sitearchdir\=./.gem.20250912-3428-h9k7jl sitelibdir\=./.gem.20250912-3428-h9k7jl install +/usr/bin/install -c -m 0755 cparse.so ./.gem.20250912-3428-h9k7jl/racc + +current directory: /home/runner/work/twilio-ruby/twilio-ruby/vendor/bundle/ruby/3.2.0/gems/racc-1.8.1/ext/racc/cparse +make DESTDIR\= sitearchdir\=./.gem.20250912-3428-h9k7jl sitelibdir\=./.gem.20250912-3428-h9k7jl clean diff --git a/vendor/bundle/ruby/3.2.0/extensions/x86_64-linux-gnu/3.2.0/racc-1.8.1/racc/cparse.so b/vendor/bundle/ruby/3.2.0/extensions/x86_64-linux-gnu/3.2.0/racc-1.8.1/racc/cparse.so new file mode 100755 index 0000000000000000000000000000000000000000..17742b7df74c8304b78e830f88ebb9e80c9fdc8e GIT binary patch literal 97128 zcmeFacX(CB7B@Wml$`Vv5=!VN7H%NaLqe_|7trrKCv+(HRVl!9{uwF_0sNC6Lonfio+ot2a}p!$8u?d;T?PCWV(9?d0Je2-Bi=e|GL#{ zPXs?ZdedzO-dy_E4H>1K?#jOs*%GE4q_ivbmjZGXdAtU_(AzZ8W;FQwC+PGBJd%$B znjD8e3HFaC-{%DUX(!;1J^}y76Y%H5LB}h%#|ik?oWKs)Ds-H7oqGa1KSK6+jdStK0!V2J%Rj?6UbMcK%RXv1Za5O06WL)4?Q3lYA6i)KPPC{ zy(g&880d63j-6@X9nb$aoxr~T1oo>>U?<@OcIr+b|KSPbLnq+xK7pOdC*U_bft}J5 z$Ul1m{ucP*c>b(8fq%Y&o#Vx~-~{q%;2%%^mJ`(HYiU=i+A$IWxYj^luA-vZ^X8Wo zEvPK6s4OZ{MFo?_70oQED4AWlpt7W5(zsD`=a-dCDxN;K#Lhc@PSK3T#YMA9%Zlfg zURI(irWehgQ8cf(;u4}WiYsT#AyT@qxT0uwNhR@FrHlKPmI+!`HP4NryxEkPKmFpO z8FPylED+V1eanjHiRF?>OUg^~D=Ow!Fr%Wlbb+voE0z?MlvP$NA$LJ#MNwJFBDSis zbOt#kc@?uAr3Fh+;!LuswV-l-g_xLCRW_q|?%ajUK;8OIFP*K5Xk*4DMKk7HQZ%c$ zbT0E|%qan<64jVFv$(R@*kOS)ipzzTrY^9XTDq`EV^w7(i_1%9RF=#XZ7R+wUN8r> zD4Aufl+Q0GO2c*$bOdyYE<@GkOIxNRMU6mHvJ_z|nqE;-EY6k&RZU+|a;Y(hYN>*< z(#oP4<;4}YuSynI!f`Xu?xF?7v!v~4Z%IX2UnILnh(%Rp=vCwrnN=i>DWbZVZq2zMSXkqR|f0b zYXC=s^?&rw(?HAHfR0&Z2_AJ2V~_JZucoD?Dcp7ZqIp%VN)x!NsMq7u>?_TFZdxZ> zFH`Ru4L_v$p$7cBC)n~`8t|*N{NM)sa?Mxeww|$X=&kc@ejE}rg^;tu#ZR(DN1-MAisth|wb8|=kIc2n#b?>(+U(*xzDDL&7atwMxpuhti4Iq(y)J&Ti@(psM@Mt6 
zie3F7Kqq#rtZ$Esjs2qa*zFmcb9c=%X!p#P73UYPH{~PNOYR z#Wv`)Q!@5(G@=#NlOVG4BGd!})<)Q-Wp)O&49H0^s$EY<$& zshij9qK{Kla!dT=X1BezQa`^9%nUwb2X9Wn&b6KNWduJEMOr-8QcHyLssp>Umrx@&N^n_%FIb3+ z&RCsMl0alON+?=@x}7S`D2X-6_|Q}jZVD1SM4%8k)HUx)ibBDF1*~m_50?mDv$0b! zB5`XgZR=mcd)XpeZCfK^n*^KN8Ymxaa~rLljlIU_Du}-mAX8vCKbo41hTH2}5H~f$ zcw3`@(+o`Z6V)H|G{u&~N8?U;q0YrA{M5W#p=lA;pMSH=ug-@nLU~3vA2`YYefIBv zx2y7Z%L{c@`BlEu@+TmFHhwC9KaB6_Q0J90Bf5XgZ*WhX5^?ksCxh<$P!^T{Ub)_& z&e@WmBxfG#n$EP&@Nb{|kI97(b)GGWmHet4b#6lXm%jYPqzn#q9wI<`#33NRL>m|2 zf_N)_Q)PaACZ%zx@>BI+D$_z zzlpqgzWi$arQbKq|331P{HlDlo>wgMpN=&DQS1kBy%#_7eU)FWdo;=X%COXa=}>Yj zKez!nfB9oMnwY>G}XNrh>>k8nzUwXWsK{2eNzJg4UO zke@^xl1g{3Kpc~TSL fy|#mW(=44N2MgFd|C z(}690{*yE0|HpPgf0=slNPuiBdC8}7GJmT5mSzb_PtEsJ3tJOLq#DMw4EditNKjmy z(*9IFb*A-3b?q-{^dSO!ro6AD(o$*9&yar=E%4)5C;QV(`H3n;L?#_ITjc*hI^5yF vmg1-UPxaps;FRxjM)`O29Fc#dYIqVaut`c. +You can redistribute it and/or modify it under either the terms of the +2-clause BSDL (see the file BSDL), or the conditions below: + +1. You may make and give away verbatim copies of the source form of the + software without restriction, provided that you duplicate all of the + original copyright notices and associated disclaimers. + +2. You may modify your copy of the software in any way, provided that + you do at least ONE of the following: + + a. place your modifications in the Public Domain or otherwise + make them Freely Available, such as by posting said + modifications to Usenet or an equivalent medium, or by allowing + the author to include your modifications in the software. + + b. use the modified software only within your corporation or + organization. + + c. give non-standard binaries non-standard names, with + instructions on where to get the original software distribution. + + d. make other distribution arrangements with the author. + +3. You may distribute the software in object code or binary form, + provided that you do at least ONE of the following: + + a. distribute the binaries and library files of the software, + together with instructions (in the manual page or equivalent) + on where to get the original distribution. + + b. accompany the distribution with the machine-readable source of + the software. + + c. give non-standard binaries non-standard names, with + instructions on where to get the original software distribution. + + d. make other distribution arrangements with the author. + +4. You may modify and include the part of the software into any other + software (possibly commercial). But some files in the distribution + are not written by the author, so that they are not under these terms. + + For the list of those files and their copying conditions, see the + file LEGAL. + +5. The scripts and library files supplied as input to or produced as + output from the software do not automatically fall under the + copyright of the software, but belong to whomever generated them, + and may be sold commercially, and may be aggregated with this + software. + +6. THIS SOFTWARE IS PROVIDED "AS IS" AND WITHOUT ANY EXPRESS OR + IMPLIED WARRANTIES, INCLUDING, WITHOUT LIMITATION, THE IMPLIED + WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR + PURPOSE. 
diff --git a/vendor/bundle/ruby/3.2.0/gems/base64-0.3.0/LEGAL b/vendor/bundle/ruby/3.2.0/gems/base64-0.3.0/LEGAL new file mode 100644 index 000000000..f2d801475 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/base64-0.3.0/LEGAL @@ -0,0 +1,60 @@ +# -*- rdoc -*- + += LEGAL NOTICE INFORMATION +-------------------------- + +All the files in this distribution are covered under either the Ruby's +license (see the file COPYING) or public-domain except some files +mentioned below. + +== MIT License +>>> + Permission is hereby granted, free of charge, to any person obtaining + a copy of this software and associated documentation files (the + "Software"), to deal in the Software without restriction, including + without limitation the rights to use, copy, modify, merge, publish, + distribute, sublicense, and/or sell copies of the Software, and to + permit persons to whom the Software is furnished to do so, subject to + the following conditions: + + The above copyright notice and this permission notice shall be + included in all copies or substantial portions of the Software. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, + EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF + MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND + NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE + LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION + OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION + WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + +== Old-style BSD license +>>> + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions + are met: + 1. Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + 2. Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + 3. Neither the name of the University nor the names of its contributors + may be used to endorse or promote products derived from this software + without specific prior written permission. + + THIS SOFTWARE IS PROVIDED BY THE REGENTS AND CONTRIBUTORS ``AS IS'' AND + ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + ARE DISCLAIMED. IN NO EVENT SHALL THE REGENTS OR CONTRIBUTORS BE LIABLE + FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL + DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS + OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) + HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT + LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY + OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF + SUCH DAMAGE. + + IMPORTANT NOTE:: + + From ftp://ftp.cs.berkeley.edu/pub/4bsd/README.Impt.License.Change + paragraph 3 above is now null and void. 
diff --git a/vendor/bundle/ruby/3.2.0/gems/base64-0.3.0/README.md b/vendor/bundle/ruby/3.2.0/gems/base64-0.3.0/README.md new file mode 100644 index 000000000..a29c58e73 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/base64-0.3.0/README.md @@ -0,0 +1,48 @@ +# Base64 + +The Base64 module provides for the encoding (`#encode64`, `#strict_encode64`, +`#urlsafe_encode64`) and decoding (`#decode64`, `#strict_decode64`, +`#urlsafe_decode64`) of binary data using a Base64 representation. + +## Installation + +Add this line to your application's Gemfile: + +```ruby +gem 'base64' +``` + +And then execute: + + $ bundle install + +Or install it yourself as: + + $ gem install base64 + +## Usage + +A simple encoding and decoding. + +```ruby +require "base64" + +enc = Base64.encode64('Send reinforcements') + # -> "U2VuZCByZWluZm9yY2VtZW50cw==\n" +plain = Base64.decode64(enc) + # -> "Send reinforcements" +``` + +The purpose of using base64 to encode data is that it translates any +binary data into purely printable characters. + +## Development + +After checking out the repo, run `bin/setup` to install dependencies. Then, run `rake test` to run the tests. You can also run `bin/console` for an interactive prompt that will allow you to experiment. + +To install this gem onto your local machine, run `bundle exec rake install`. To release a new version, update the version number in `version.rb`, and then run `bundle exec rake release`, which will create a git tag for the version, push git commits and tags, and push the `.gem` file to [rubygems.org](https://rubygems.org). + +## Contributing + +Bug reports and pull requests are welcome on GitHub at https://github.com/ruby/base64. + diff --git a/vendor/bundle/ruby/3.2.0/gems/base64-0.3.0/lib/base64.rb b/vendor/bundle/ruby/3.2.0/gems/base64-0.3.0/lib/base64.rb new file mode 100644 index 000000000..8c0145d25 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/base64-0.3.0/lib/base64.rb @@ -0,0 +1,381 @@ +# frozen_string_literal: true +# +# \Module \Base64 provides methods for: +# +# - \Encoding a binary string (containing non-ASCII characters) +# as a string of printable ASCII characters. +# - Decoding such an encoded string. +# +# \Base64 is commonly used in contexts where binary data +# is not allowed or supported: +# +# - Images in HTML or CSS files, or in URLs. +# - Email attachments. +# +# A \Base64-encoded string is about one-third larger that its source. +# See the {Wikipedia article}[https://en.wikipedia.org/wiki/Base64] +# for more information. +# +# This module provides three pairs of encode/decode methods. +# Your choices among these methods should depend on: +# +# - Which character set is to be used for encoding and decoding. +# - Whether "padding" is to be used. +# - Whether encoded strings are to contain newlines. +# +# Note: Examples on this page assume that the including program has executed: +# +# require 'base64' +# +# == \Encoding Character Sets +# +# A \Base64-encoded string consists only of characters from a 64-character set: +# +# - ('A'..'Z'). +# - ('a'..'z'). +# - ('0'..'9'). +# - =, the 'padding' character. +# - Either: +# - %w[+ /]: +# {RFC-2045-compliant}[https://datatracker.ietf.org/doc/html/rfc2045]; +# _not_ safe for URLs. +# - %w[- _]: +# {RFC-4648-compliant}[https://datatracker.ietf.org/doc/html/rfc4648]; +# safe for URLs. 
+# +# If you are working with \Base64-encoded strings that will come from +# or be put into URLs, you should choose this encoder-decoder pair +# of RFC-4648-compliant methods: +# +# - Base64.urlsafe_encode64 and Base64.urlsafe_decode64. +# +# Otherwise, you may choose any of the pairs in this module, +# including the pair above, or the RFC-2045-compliant pairs: +# +# - Base64.encode64 and Base64.decode64. +# - Base64.strict_encode64 and Base64.strict_decode64. +# +# == Padding +# +# \Base64-encoding changes a triplet of input bytes +# into a quartet of output characters. +# +# Padding in Encode Methods +# +# Padding -- extending an encoded string with zero, one, or two trailing +# = characters -- is performed by methods Base64.encode64, +# Base64.strict_encode64, and, by default, Base64.urlsafe_encode64: +# +# Base64.encode64('s') # => "cw==\n" +# Base64.strict_encode64('s') # => "cw==" +# Base64.urlsafe_encode64('s') # => "cw==" +# Base64.urlsafe_encode64('s', padding: false) # => "cw" +# +# When padding is performed, the encoded string is always of length 4n, +# where +n+ is a non-negative integer: +# +# - Input bytes of length 3n generate unpadded output characters +# of length 4n: +# +# # n = 1: 3 bytes => 4 characters. +# Base64.strict_encode64('123') # => "MDEy" +# # n = 2: 6 bytes => 8 characters. +# Base64.strict_encode64('123456') # => "MDEyMzQ1" +# +# - Input bytes of length 3n+1 generate padded output characters +# of length 4(n+1), with two padding characters at the end: +# +# # n = 1: 4 bytes => 8 characters. +# Base64.strict_encode64('1234') # => "MDEyMw==" +# # n = 2: 7 bytes => 12 characters. +# Base64.strict_encode64('1234567') # => "MDEyMzQ1Ng==" +# +# - Input bytes of length 3n+2 generate padded output characters +# of length 4(n+1), with one padding character at the end: +# +# # n = 1: 5 bytes => 8 characters. +# Base64.strict_encode64('12345') # => "MDEyMzQ=" +# # n = 2: 8 bytes => 12 characters. +# Base64.strict_encode64('12345678') # => "MDEyMzQ1Njc=" +# +# When padding is suppressed, for a positive integer n: +# +# - Input bytes of length 3n generate unpadded output characters +# of length 4n: +# +# # n = 1: 3 bytes => 4 characters. +# Base64.urlsafe_encode64('123', padding: false) # => "MDEy" +# # n = 2: 6 bytes => 8 characters. +# Base64.urlsafe_encode64('123456', padding: false) # => "MDEyMzQ1" +# +# - Input bytes of length 3n+1 generate unpadded output characters +# of length 4n+2, with two padding characters at the end: +# +# # n = 1: 4 bytes => 6 characters. +# Base64.urlsafe_encode64('1234', padding: false) # => "MDEyMw" +# # n = 2: 7 bytes => 10 characters. +# Base64.urlsafe_encode64('1234567', padding: false) # => "MDEyMzQ1Ng" +# +# - Input bytes of length 3n+2 generate unpadded output characters +# of length 4n+3, with one padding character at the end: +# +# # n = 1: 5 bytes => 7 characters. +# Base64.urlsafe_encode64('12345', padding: false) # => "MDEyMzQ" +# # m = 2: 8 bytes => 11 characters. +# Base64.urlsafe_encode64('12345678', padding: false) # => "MDEyMzQ1Njc" +# +# Padding in Decode Methods +# +# All of the \Base64 decode methods support (but do not require) padding. 
+# +# \Method Base64.decode64 does not check the size of the padding: +# +# Base64.decode64("MDEyMzQ1Njc") # => "01234567" +# Base64.decode64("MDEyMzQ1Njc=") # => "01234567" +# Base64.decode64("MDEyMzQ1Njc==") # => "01234567" +# +# \Method Base64.strict_decode64 strictly enforces padding size: +# +# Base64.strict_decode64("MDEyMzQ1Njc") # Raises ArgumentError +# Base64.strict_decode64("MDEyMzQ1Njc=") # => "01234567" +# Base64.strict_decode64("MDEyMzQ1Njc==") # Raises ArgumentError +# +# \Method Base64.urlsafe_decode64 allows padding in the encoded string, +# which if present, must be correct: +# see {Padding}[Base64.html#module-Base64-label-Padding], above: +# +# Base64.urlsafe_decode64("MDEyMzQ1Njc") # => "01234567" +# Base64.urlsafe_decode64("MDEyMzQ1Njc=") # => "01234567" +# Base64.urlsafe_decode64("MDEyMzQ1Njc==") # Raises ArgumentError. +# +# == Newlines +# +# An encoded string returned by Base64.encode64 or Base64.urlsafe_encode64 +# has an embedded newline character +# after each 60-character sequence, and, if non-empty, at the end: +# +# # No newline if empty. +# encoded = Base64.encode64("\x00" * 0) +# encoded.index("\n") # => nil +# +# # Newline at end of short output. +# encoded = Base64.encode64("\x00" * 1) +# encoded.size # => 4 +# encoded.index("\n") # => 4 +# +# # Newline at end of longer output. +# encoded = Base64.encode64("\x00" * 45) +# encoded.size # => 60 +# encoded.index("\n") # => 60 +# +# # Newlines embedded and at end of still longer output. +# encoded = Base64.encode64("\x00" * 46) +# encoded.size # => 65 +# encoded.rindex("\n") # => 65 +# encoded.split("\n").map {|s| s.size } # => [60, 4] +# +# The string to be encoded may itself contain newlines, +# which are encoded as \Base64: +# +# # Base64.encode64("\n\n\n") # => "CgoK\n" +# s = "This is line 1\nThis is line 2\n" +# Base64.encode64(s) # => "VGhpcyBpcyBsaW5lIDEKVGhpcyBpcyBsaW5lIDIK\n" +# +module Base64 + + VERSION = "0.3.0" + + module_function + + # :call-seq: + # Base64.encode64(string) -> encoded_string + # + # Returns a string containing the RFC-2045-compliant \Base64-encoding of +string+. + # + # Per RFC 2045, the returned string may contain the URL-unsafe characters + # + or /; + # see {Encoding Character Set}[Base64.html#module-Base64-label-Encoding+Character+Sets] above: + # + # Base64.encode64("\xFB\xEF\xBE") # => "++++\n" + # Base64.encode64("\xFF\xFF\xFF") # => "////\n" + # + # The returned string may include padding; + # see {Padding}[Base64.html#module-Base64-label-Padding] above. 
+ # + # Base64.encode64('*') # => "Kg==\n" + # + # The returned string ends with a newline character, and if sufficiently long + # will have one or more embedded newline characters; + # see {Newlines}[Base64.html#module-Base64-label-Newlines] above: + # + # Base64.encode64('*') # => "Kg==\n" + # Base64.encode64('*' * 46) + # # => "KioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioq\nKg==\n" + # + # The string to be encoded may itself contain newlines, + # which will be encoded as ordinary \Base64: + # + # Base64.encode64("\n\n\n") # => "CgoK\n" + # s = "This is line 1\nThis is line 2\n" + # Base64.encode64(s) # => "VGhpcyBpcyBsaW5lIDEKVGhpcyBpcyBsaW5lIDIK\n" + # + def encode64(bin) + [bin].pack("m") + end + + # :call-seq: + # Base64.decode64(encoded_string) -> decoded_string + # + # Returns a string containing the decoding of an RFC-2045-compliant + # \Base64-encoded string +encoded_string+: + # + # s = "VGhpcyBpcyBsaW5lIDEKVGhpcyBpcyBsaW5lIDIK\n" + # Base64.decode64(s) # => "This is line 1\nThis is line 2\n" + # + # Non-\Base64 characters in +encoded_string+ are ignored; + # see {Encoding Character Set}[Base64.html#module-Base64-label-Encoding+Character+Sets] above: + # these include newline characters and characters - and _: + # + # Base64.decode64("\x00\n-_") # => "" + # + # Padding in +encoded_string+ (even if incorrect) is ignored: + # + # Base64.decode64("MDEyMzQ1Njc") # => "01234567" + # Base64.decode64("MDEyMzQ1Njc=") # => "01234567" + # Base64.decode64("MDEyMzQ1Njc==") # => "01234567" + # + def decode64(str) + str.unpack1("m") + end + + # :call-seq: + # Base64.strict_encode64(string) -> encoded_string + # + # Returns a string containing the RFC-2045-compliant \Base64-encoding of +string+. + # + # Per RFC 2045, the returned string may contain the URL-unsafe characters + # + or /; + # see {Encoding Character Set}[Base64.html#module-Base64-label-Encoding+Character+Sets] above: + # + # Base64.strict_encode64("\xFB\xEF\xBE") # => "++++" + # Base64.strict_encode64("\xFF\xFF\xFF") # => "////" + # + # The returned string may include padding; + # see {Padding}[Base64.html#module-Base64-label-Padding] above.
+ # + # Base64.strict_encode64('*') # => "Kg==" + # + # The returned string will have no newline characters, regardless of its length; + # see {Newlines}[Base64.html#module-Base64-label-Newlines] above: + # + # Base64.strict_encode64('*') # => "Kg==" + # Base64.strict_encode64('*' * 46) + # # => "KioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKg==" + # + # The string to be encoded may itself contain newlines, + # which will be encoded as ordinary \Base64: + # + # Base64.strict_encode64("\n\n\n") # => "CgoK" + # s = "This is line 1\nThis is line 2\n" + # Base64.strict_encode64(s) # => "VGhpcyBpcyBsaW5lIDEKVGhpcyBpcyBsaW5lIDIK" + # + def strict_encode64(bin) + [bin].pack("m0") + end + + # :call-seq: + # Base64.strict_decode64(encoded_string) -> decoded_string + # + # Returns a string containing the decoding of an RFC-2045-compliant + # \Base64-encoded string +encoded_string+: + # + # s = "VGhpcyBpcyBsaW5lIDEKVGhpcyBpcyBsaW5lIDIK" + # Base64.strict_decode64(s) # => "This is line 1\nThis is line 2\n" + # + # Non-\Base64 characters in +encoded_string+ are not allowed; + # see {Encoding Character Set}[Base64.html#module-Base64-label-Encoding+Character+Sets] above: + # these include newline characters and characters - and _: + # + # Base64.strict_decode64("\n") # Raises ArgumentError + # Base64.strict_decode64('-') # Raises ArgumentError + # Base64.strict_decode64('_') # Raises ArgumentError + # + # Padding in +encoded_string+, if present, must be correct: + # + # Base64.strict_decode64("MDEyMzQ1Njc") # Raises ArgumentError + # Base64.strict_decode64("MDEyMzQ1Njc=") # => "01234567" + # Base64.strict_decode64("MDEyMzQ1Njc==") # Raises ArgumentError + # + def strict_decode64(str) + str.unpack1("m0") + end + + # :call-seq: + # Base64.urlsafe_encode64(string) -> encoded_string + # + # Returns the RFC-4648-compliant \Base64-encoding of +string+. + # + # Per RFC 4648, the returned string will not contain the URL-unsafe characters + # + or /, + # but instead may contain the URL-safe characters + # - and _; + # see {Encoding Character Set}[Base64.html#module-Base64-label-Encoding+Character+Sets] above: + # + # Base64.urlsafe_encode64("\xFB\xEF\xBE") # => "----" + # Base64.urlsafe_encode64("\xFF\xFF\xFF") # => "____" + # + # By default, the returned string may have padding; + # see {Padding}[Base64.html#module-Base64-label-Padding], above: + # + # Base64.urlsafe_encode64('*') # => "Kg==" + # + # Optionally, you can suppress padding: + # + # Base64.urlsafe_encode64('*', padding: false) # => "Kg" + # + # The returned string will have no newline characters, regardless of its length; + # see {Newlines}[Base64.html#module-Base64-label-Newlines] above: + # + # Base64.urlsafe_encode64('*') # => "Kg==" + # Base64.urlsafe_encode64('*' * 46) + # # => "KioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKg==" + # + def urlsafe_encode64(bin, padding: true) + str = strict_encode64(bin) + str.chomp!("==") or str.chomp!("=") unless padding + str.tr!("+/", "-_") + str + end + + # :call-seq: + # Base64.urlsafe_decode64(encoded_string) -> decoded_string + # + # Returns the decoding of an RFC-4648-compliant \Base64-encoded string +encoded_string+: + # + # +encoded_string+ may not contain non-Base64 characters; + # see {Encoding Character Set}[Base64.html#module-Base64-label-Encoding+Character+Sets] above: + # + # Base64.urlsafe_decode64('+') # Raises ArgumentError. + # Base64.urlsafe_decode64('/') # Raises ArgumentError. + # Base64.urlsafe_decode64("\n") # Raises ArgumentError.
+ # + # Padding in +encoded_string+, if present, must be correct: + # see {Padding}[Base64.html#module-Base64-label-Padding], above: + # + # Base64.urlsafe_decode64("MDEyMzQ1Njc") # => "01234567" + # Base64.urlsafe_decode64("MDEyMzQ1Njc=") # => "01234567" + # Base64.urlsafe_decode64("MDEyMzQ1Njc==") # Raises ArgumentError. + # + def urlsafe_decode64(str) + # NOTE: RFC 4648 says nothing about unpadded input, but it does say that + # "the excess pad characters MAY also be ignored", so it is inferred that + # unpadded input is also acceptable. + if !str.end_with?("=") && str.length % 4 != 0 + str = str.ljust((str.length + 3) & ~3, "=") + str.tr!("-_", "+/") + else + str = str.tr("-_", "+/") + end + strict_decode64(str) + end +end diff --git a/vendor/bundle/ruby/3.2.0/gems/base64-0.3.0/sig/base64.rbs b/vendor/bundle/ruby/3.2.0/gems/base64-0.3.0/sig/base64.rbs new file mode 100644 index 000000000..147e874c3 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/base64-0.3.0/sig/base64.rbs @@ -0,0 +1,355 @@ +# +# Module Base64 provides methods for: +# +# * Encoding a binary string (containing non-ASCII characters) as a string of +# printable ASCII characters. +# * Decoding such an encoded string. +# +# Base64 is commonly used in contexts where binary data is not allowed or +# supported: +# +# * Images in HTML or CSS files, or in URLs. +# * Email attachments. +# +# A Base64-encoded string is about one-third larger than its source. See the +# [Wikipedia article](https://en.wikipedia.org/wiki/Base64) for more +# information. +# +# This module provides three pairs of encode/decode methods. Your choices among +# these methods should depend on: +# +# * Which character set is to be used for encoding and decoding. +# * Whether "padding" is to be used. +# * Whether encoded strings are to contain newlines. +# +# Note: Examples on this page assume that the including program has executed: +# +# require 'base64' +# +# ## Encoding Character Sets +# +# A Base64-encoded string consists only of characters from a 64-character set: +# +# * `('A'..'Z')`. +# * `('a'..'z')`. +# * `('0'..'9')`. +# * `=`, the 'padding' character. +# * Either: +# * `%w[+ /]`: +# [RFC-2045-compliant](https://datatracker.ietf.org/doc/html/rfc2045); +# *not* safe for URLs. +# * `%w[- _]`: +# [RFC-4648-compliant](https://datatracker.ietf.org/doc/html/rfc4648); +# safe for URLs. +# +# If you are working with Base64-encoded strings that will come from or be put +# into URLs, you should choose this encoder-decoder pair of RFC-4648-compliant +# methods: +# +# * Base64.urlsafe_encode64 and Base64.urlsafe_decode64. +# +# Otherwise, you may choose any of the pairs in this module, including the pair +# above, or the RFC-2045-compliant pairs: +# +# * Base64.encode64 and Base64.decode64. +# * Base64.strict_encode64 and Base64.strict_decode64. +# +# ## Padding +# +# Base64-encoding changes a triplet of input bytes into a quartet of output +# characters.
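+# For example (a brief illustration added for clarity; the outputs follow the rules detailed below): +# +# Base64.strict_encode64('abc') # => "YWJj" (3 bytes => 4 characters, no padding) +# Base64.strict_encode64('abcd') # => "YWJjZA==" (4 bytes => 8 characters, 2 padding characters)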
+# +# **Padding in Encode Methods** +# +# Padding -- extending an encoded string with zero, one, or two trailing `=` +# characters -- is performed by methods Base64.encode64, Base64.strict_encode64, +# and, by default, Base64.urlsafe_encode64: +# +# Base64.encode64('s') # => "cw==\n" +# Base64.strict_encode64('s') # => "cw==" +# Base64.urlsafe_encode64('s') # => "cw==" +# Base64.urlsafe_encode64('s', padding: false) # => "cw" +# +# When padding is performed, the encoded string is always of length *4n*, where +# `n` is a non-negative integer: +# +# * Input bytes of length *3n* generate unpadded output characters of length +# *4n*: +# +# # n = 1: 3 bytes => 4 characters. +# Base64.strict_encode64('123') # => "MDEy" +# # n = 2: 6 bytes => 8 characters. +# Base64.strict_encode64('123456') # => "MDEyMzQ1" +# +# * Input bytes of length *3n+1* generate padded output characters of length +# *4(n+1)*, with two padding characters at the end: +# +# # n = 1: 4 bytes => 8 characters. +# Base64.strict_encode64('1234') # => "MDEyMw==" +# # n = 2: 7 bytes => 12 characters. +# Base64.strict_encode64('1234567') # => "MDEyMzQ1Ng==" +# +# * Input bytes of length *3n+2* generate padded output characters of length +# *4(n+1)*, with one padding character at the end: +# +# # n = 1: 5 bytes => 8 characters. +# Base64.strict_encode64('12345') # => "MDEyMzQ=" +# # n = 2: 8 bytes => 12 characters. +# Base64.strict_encode64('12345678') # => "MDEyMzQ1Njc=" +# +# When padding is suppressed, for a positive integer *n*: +# +# * Input bytes of length *3n* generate unpadded output characters of length +# *4n*: +# +# # n = 1: 3 bytes => 4 characters. +# Base64.urlsafe_encode64('123', padding: false) # => "MDEy" +# # n = 2: 6 bytes => 8 characters. +# Base64.urlsafe_encode64('123456', padding: false) # => "MDEyMzQ1" +# +# * Input bytes of length *3n+1* generate unpadded output characters of length +# *4n+2*, with the two padding characters omitted: +# +# # n = 1: 4 bytes => 6 characters. +# Base64.urlsafe_encode64('1234', padding: false) # => "MDEyMw" +# # n = 2: 7 bytes => 10 characters. +# Base64.urlsafe_encode64('1234567', padding: false) # => "MDEyMzQ1Ng" +# +# * Input bytes of length *3n+2* generate unpadded output characters of length +# *4n+3*, with the one padding character omitted: +# +# # n = 1: 5 bytes => 7 characters. +# Base64.urlsafe_encode64('12345', padding: false) # => "MDEyMzQ" +# # n = 2: 8 bytes => 11 characters. +# Base64.urlsafe_encode64('12345678', padding: false) # => "MDEyMzQ1Njc" +# +# **Padding in Decode Methods** +# +# All of the Base64 decode methods accept padded input; whether padding is required or checked varies by method. +# +# Method Base64.decode64 does not check the size of the padding: +# +# Base64.decode64("MDEyMzQ1Njc") # => "01234567" +# Base64.decode64("MDEyMzQ1Njc=") # => "01234567" +# Base64.decode64("MDEyMzQ1Njc==") # => "01234567" +# +# Method Base64.strict_decode64 strictly enforces padding size: +# +# Base64.strict_decode64("MDEyMzQ1Njc") # Raises ArgumentError +# Base64.strict_decode64("MDEyMzQ1Njc=") # => "01234567" +# Base64.strict_decode64("MDEyMzQ1Njc==") # Raises ArgumentError +# +# Method Base64.urlsafe_decode64 allows padding in `str`, which, if present, must +# be correct: see [Padding](Base64.html#module-Base64-label-Padding), above: +# +# Base64.urlsafe_decode64("MDEyMzQ1Njc") # => "01234567" +# Base64.urlsafe_decode64("MDEyMzQ1Njc=") # => "01234567" +# Base64.urlsafe_decode64("MDEyMzQ1Njc==") # Raises ArgumentError.
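+# +# As a minimal round-trip sketch (values taken from the examples above), unpadded URL-safe +# output can be passed straight back to Base64.urlsafe_decode64: +# +# encoded = Base64.urlsafe_encode64('01234567', padding: false) # => "MDEyMzQ1Njc" +# Base64.urlsafe_decode64(encoded) # => "01234567"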
+# +# ## Newlines +# +# An encoded string returned by Base64.encode64 has +# an embedded newline character after each 60-character sequence, and, if +# non-empty, at the end: +# +# # No newline if empty. +# encoded = Base64.encode64("\x00" * 0) +# encoded.index("\n") # => nil +# +# # Newline at end of short output. +# encoded = Base64.encode64("\x00" * 1) +# encoded.size # => 5 +# encoded.index("\n") # => 4 +# +# # Newline at end of longer output. +# encoded = Base64.encode64("\x00" * 45) +# encoded.size # => 61 +# encoded.index("\n") # => 60 +# +# # Newlines embedded and at end of still longer output. +# encoded = Base64.encode64("\x00" * 46) +# encoded.size # => 66 +# encoded.rindex("\n") # => 65 +# encoded.split("\n").map {|s| s.size } # => [60, 4] +# +# The string to be encoded may itself contain newlines, which are encoded as +# Base64: +# +# Base64.encode64("\n\n\n") # => "CgoK\n" +# s = "This is line 1\nThis is line 2\n" +# Base64.encode64(s) # => "VGhpcyBpcyBsaW5lIDEKVGhpcyBpcyBsaW5lIDIK\n" +# +module Base64 + # + # Returns a string containing the decoding of an RFC-2045-compliant + # Base64-encoded string `str`: + # + # s = "VGhpcyBpcyBsaW5lIDEKVGhpcyBpcyBsaW5lIDIK\n" + # Base64.decode64(s) # => "This is line 1\nThis is line 2\n" + # + # Non-Base64 characters in `str` are ignored; see [Encoding Character + # Set](Base64.html#module-Base64-label-Encoding+Character+Sets) above: these + # include newline characters and characters `-` and `_`: + # + # Base64.decode64("\x00\n-_") # => "" + # + # Padding in `str` (even if incorrect) is ignored: + # + # Base64.decode64("MDEyMzQ1Njc") # => "01234567" + # Base64.decode64("MDEyMzQ1Njc=") # => "01234567" + # Base64.decode64("MDEyMzQ1Njc==") # => "01234567" + # + def self?.decode64: (String str) -> String + + # + # Returns a string containing the RFC-2045-compliant Base64-encoding of `bin`. + # + # Per RFC 2045, the returned string may contain the URL-unsafe characters `+` or + # `/`; see [Encoding Character + # Set](Base64.html#module-Base64-label-Encoding+Character+Sets) above: + # + # Base64.encode64("\xFB\xEF\xBE") # => "++++\n" + # Base64.encode64("\xFF\xFF\xFF") # => "////\n" + # + # The returned string may include padding; see + # [Padding](Base64.html#module-Base64-label-Padding) above.
+ # + # Base64.encode64('*') # => "Kg==\n" + # + # The returned string ends with a newline character, and if sufficiently long + # will have one or more embedded newline characters; see + # [Newlines](Base64.html#module-Base64-label-Newlines) above: + # + # Base64.encode64('*') # => "Kg==\n" + # Base64.encode64('*' * 46) + # # => "KioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioq\nKg==\n" + # + # The string to be encoded may itself contain newlines, which will be encoded as + # ordinary Base64: + # + # Base64.encode64("\n\n\n") # => "CgoK\n" + # s = "This is line 1\nThis is line 2\n" + # Base64.encode64(s) # => "VGhpcyBpcyBsaW5lIDEKVGhpcyBpcyBsaW5lIDIK\n" + # + def self?.encode64: (String bin) -> String + + # + # Returns a string containing the decoding of an RFC-2045-compliant + # Base64-encoded string `str`: + # + # s = "VGhpcyBpcyBsaW5lIDEKVGhpcyBpcyBsaW5lIDIK" + # Base64.strict_decode64(s) # => "This is line 1\nThis is line 2\n" + # + # Non-Base64 characters in `str` are not allowed; see [Encoding Character + # Set](Base64.html#module-Base64-label-Encoding+Character+Sets) above: these + # include newline characters and characters `-` and `_`: + # + # Base64.strict_decode64("\n") # Raises ArgumentError + # Base64.strict_decode64('-') # Raises ArgumentError + # Base64.strict_decode64('_') # Raises ArgumentError + # + # Padding in `str`, if present, must be correct: + # + # Base64.strict_decode64("MDEyMzQ1Njc") # Raises ArgumentError + # Base64.strict_decode64("MDEyMzQ1Njc=") # => "01234567" + # Base64.strict_decode64("MDEyMzQ1Njc==") # Raises ArgumentError + # + def self?.strict_decode64: (String str) -> String + + # + # Returns a string containing the RFC-2045-compliant Base64-encoding of `bin`. + # + # Per RFC 2045, the returned string may contain the URL-unsafe characters `+` or + # `/`; see [Encoding Character + # Set](Base64.html#module-Base64-label-Encoding+Character+Sets) above: + # + # Base64.strict_encode64("\xFB\xEF\xBE") # => "++++" + # Base64.strict_encode64("\xFF\xFF\xFF") # => "////" + # + # The returned string may include padding; see + # [Padding](Base64.html#module-Base64-label-Padding) above. + # + # Base64.strict_encode64('*') # => "Kg==" + # + # The returned string will have no newline characters, regardless of its length; + # see [Newlines](Base64.html#module-Base64-label-Newlines) above: + # + # Base64.strict_encode64('*') # => "Kg==" + # Base64.strict_encode64('*' * 46) + # # => "KioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKg==" + # + # The string to be encoded may itself contain newlines, which will be encoded as + # ordinary Base64: + # + # Base64.strict_encode64("\n\n\n") # => "CgoK" + # s = "This is line 1\nThis is line 2\n" + # Base64.strict_encode64(s) # => "VGhpcyBpcyBsaW5lIDEKVGhpcyBpcyBsaW5lIDIK" + # + def self?.strict_encode64: (String bin) -> String + + # + # Returns the decoding of an RFC-4648-compliant Base64-encoded string `str`: + # + # `str` may not contain non-Base64 characters; see [Encoding Character + # Set](Base64.html#module-Base64-label-Encoding+Character+Sets) above: + # + # Base64.urlsafe_decode64('+') # Raises ArgumentError. + # Base64.urlsafe_decode64('/') # Raises ArgumentError. + # Base64.urlsafe_decode64("\n") # Raises ArgumentError.
+ # + # Padding in `str`, if present, must be correct: see + # [Padding](Base64.html#module-Base64-label-Padding), above: + # + # Base64.urlsafe_decode64("MDEyMzQ1Njc") # => "01234567" + # Base64.urlsafe_decode64("MDEyMzQ1Njc=") # => "01234567" + # Base64.urlsafe_decode64("MDEyMzQ1Njc==") # Raises ArgumentError. + # + def self?.urlsafe_decode64: (String str) -> String + + # + # Returns the RFC-4648-compliant Base64-encoding of `bin`. + # + # Per RFC 4648, the returned string will not contain the URL-unsafe characters + # `+` or `/`, but instead may contain the URL-safe characters `-` and `_`; see + # [Encoding Character + # Set](Base64.html#module-Base64-label-Encoding+Character+Sets) above: + # + # Base64.urlsafe_encode64("\xFB\xEF\xBE") # => "----" + # Base64.urlsafe_encode64("\xFF\xFF\xFF") # => "____" + # + # By default, the returned string may have padding; see + # [Padding](Base64.html#module-Base64-label-Padding), above: + # + # Base64.urlsafe_encode64('*') # => "Kg==" + # + # Optionally, you can suppress padding: + # + # Base64.urlsafe_encode64('*', padding: false) # => "Kg" + # + # The returned string will have no newline characters, regardless of its length; + # see [Newlines](Base64.html#module-Base64-label-Newlines) above: + # + # Base64.urlsafe_encode64('*') # => "Kg==" + # Base64.urlsafe_encode64('*' * 46) + # # => "KioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKg==" + # + def self?.urlsafe_encode64: (String bin, ?padding: boolish) -> String +end diff --git a/vendor/bundle/ruby/3.2.0/gems/benchmark-0.4.1/.github/dependabot.yml b/vendor/bundle/ruby/3.2.0/gems/benchmark-0.4.1/.github/dependabot.yml new file mode 100644 index 000000000..b18fd2935 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/benchmark-0.4.1/.github/dependabot.yml @@ -0,0 +1,6 @@ +version: 2 +updates: + - package-ecosystem: 'github-actions' + directory: '/' + schedule: + interval: 'weekly' diff --git a/vendor/bundle/ruby/3.2.0/gems/benchmark-0.4.1/.github/workflows/push_gem.yml b/vendor/bundle/ruby/3.2.0/gems/benchmark-0.4.1/.github/workflows/push_gem.yml new file mode 100644 index 000000000..5019826c8 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/benchmark-0.4.1/.github/workflows/push_gem.yml @@ -0,0 +1,46 @@ +name: Publish gem to rubygems.org + +on: + push: + tags: + - 'v*' + +permissions: + contents: read + +jobs: + push: + if: github.repository == 'ruby/benchmark' + runs-on: ubuntu-latest + + environment: + name: rubygems.org + url: https://rubygems.org/gems/benchmark + + permissions: + contents: write + id-token: write + + steps: + - name: Harden Runner + uses: step-security/harden-runner@0634a2670c59f64b4a01f0f96f84700a4088b9f0 # v2.12.0 + with: + egress-policy: audit + + - uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b # v4.1.4 + + - name: Set up Ruby + uses: ruby/setup-ruby@a6e6f86333f0a2523ece813039b8b4be04560854 # v1.190.0 + with: + bundler-cache: true + ruby-version: ruby + + - name: Publish to RubyGems + uses: rubygems/release-gem@a25424ba2ba8b387abc8ef40807c2c85b96cbe32 # v1.1.1 + + - name: Create GitHub release + run: | + tag_name="$(git describe --tags --abbrev=0)" + gh release create "${tag_name}" --verify-tag --generate-notes + env: + GITHUB_TOKEN: ${{ secrets.MATZBOT_GITHUB_WORKFLOW_TOKEN }} diff --git a/vendor/bundle/ruby/3.2.0/gems/benchmark-0.4.1/.github/workflows/test.yml b/vendor/bundle/ruby/3.2.0/gems/benchmark-0.4.1/.github/workflows/test.yml new file mode 100644 index 000000000..74a4e7e03 --- /dev/null +++ 
b/vendor/bundle/ruby/3.2.0/gems/benchmark-0.4.1/.github/workflows/test.yml @@ -0,0 +1,32 @@ +name: test + +on: [push, pull_request] + +jobs: + ruby-versions: + uses: ruby/actions/.github/workflows/ruby_versions.yml@master + with: + min_version: 2.5 + + test: + needs: ruby-versions + name: build (${{ matrix.ruby }} / ${{ matrix.os }}) + strategy: + matrix: + ruby: ${{ fromJson(needs.ruby-versions.outputs.versions) }} + os: [ ubuntu-latest, macos-latest, windows-latest ] + exclude: + - { os: macos-latest, ruby: 2.5 } + - { os: windows-latest, ruby: truffleruby-head } + - { os: windows-latest, ruby: truffleruby } + runs-on: ${{ matrix.os }} + steps: + - uses: actions/checkout@v4 + - name: Set up Ruby + uses: ruby/setup-ruby@v1 + with: + ruby-version: ${{ matrix.ruby }} + - name: Install dependencies + run: bundle install + - name: Run test + run: rake test diff --git a/vendor/bundle/ruby/3.2.0/gems/benchmark-0.4.1/.gitignore b/vendor/bundle/ruby/3.2.0/gems/benchmark-0.4.1/.gitignore new file mode 100644 index 000000000..4ea57987f --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/benchmark-0.4.1/.gitignore @@ -0,0 +1,9 @@ +/.bundle/ +/.yardoc +/_yardoc/ +/coverage/ +/doc/ +/pkg/ +/spec/reports/ +/tmp/ +Gemfile.lock diff --git a/vendor/bundle/ruby/3.2.0/gems/benchmark-0.4.1/BSDL b/vendor/bundle/ruby/3.2.0/gems/benchmark-0.4.1/BSDL new file mode 100644 index 000000000..66d93598a --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/benchmark-0.4.1/BSDL @@ -0,0 +1,22 @@ +Copyright (C) 1993-2013 Yukihiro Matsumoto. All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions +are met: +1. Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. +2. Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + +THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE +ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS +OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) +HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY +OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF +SUCH DAMAGE. diff --git a/vendor/bundle/ruby/3.2.0/gems/benchmark-0.4.1/COPYING b/vendor/bundle/ruby/3.2.0/gems/benchmark-0.4.1/COPYING new file mode 100644 index 000000000..48e5a96de --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/benchmark-0.4.1/COPYING @@ -0,0 +1,56 @@ +Ruby is copyrighted free software by Yukihiro Matsumoto . +You can redistribute it and/or modify it under either the terms of the +2-clause BSDL (see the file BSDL), or the conditions below: + +1. You may make and give away verbatim copies of the source form of the + software without restriction, provided that you duplicate all of the + original copyright notices and associated disclaimers. + +2. 
You may modify your copy of the software in any way, provided that + you do at least ONE of the following: + + a. place your modifications in the Public Domain or otherwise + make them Freely Available, such as by posting said + modifications to Usenet or an equivalent medium, or by allowing + the author to include your modifications in the software. + + b. use the modified software only within your corporation or + organization. + + c. give non-standard binaries non-standard names, with + instructions on where to get the original software distribution. + + d. make other distribution arrangements with the author. + +3. You may distribute the software in object code or binary form, + provided that you do at least ONE of the following: + + a. distribute the binaries and library files of the software, + together with instructions (in the manual page or equivalent) + on where to get the original distribution. + + b. accompany the distribution with the machine-readable source of + the software. + + c. give non-standard binaries non-standard names, with + instructions on where to get the original software distribution. + + d. make other distribution arrangements with the author. + +4. You may modify and include the part of the software into any other + software (possibly commercial). But some files in the distribution + are not written by the author, so that they are not under these terms. + + For the list of those files and their copying conditions, see the + file LEGAL. + +5. The scripts and library files supplied as input to or produced as + output from the software do not automatically fall under the + copyright of the software, but belong to whomever generated them, + and may be sold commercially, and may be aggregated with this + software. + +6. THIS SOFTWARE IS PROVIDED "AS IS" AND WITHOUT ANY EXPRESS OR + IMPLIED WARRANTIES, INCLUDING, WITHOUT LIMITATION, THE IMPLIED + WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR + PURPOSE. diff --git a/vendor/bundle/ruby/3.2.0/gems/benchmark-0.4.1/Gemfile b/vendor/bundle/ruby/3.2.0/gems/benchmark-0.4.1/Gemfile new file mode 100644 index 000000000..3dc288354 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/benchmark-0.4.1/Gemfile @@ -0,0 +1,9 @@ +source "https://rubygems.org" + +gemspec + +group :development do + gem "bundler" + gem "rake" + gem "test-unit" +end diff --git a/vendor/bundle/ruby/3.2.0/gems/benchmark-0.4.1/README.md b/vendor/bundle/ruby/3.2.0/gems/benchmark-0.4.1/README.md new file mode 100644 index 000000000..c5705939a --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/benchmark-0.4.1/README.md @@ -0,0 +1,138 @@ +# Benchmark + +The Benchmark module provides methods for benchmarking Ruby code, giving detailed reports on the time taken for each task. + +## Installation + +Add this line to your application's Gemfile: + +```ruby +gem 'benchmark' +``` + +And then execute: + + $ bundle + +Or install it yourself as: + + $ gem install benchmark + +## Usage + +The Benchmark module provides methods to measure and report the time used to execute Ruby code. + +Measure the time to construct the string given by the expression "a"*1_000_000_000: + +```ruby +require 'benchmark' +puts Benchmark.measure { "a"*1_000_000_000 } +``` + +On my machine (OSX 10.8.3 on i5 1.7 GHz) this generates: + +``` +0.350000 0.400000 0.750000 ( 0.835234) +``` + +This report shows the user CPU time, system CPU time, the total time (sum of user CPU time, system CPU time, children's user CPU time, and children's system CPU time), and the elapsed real time. 
The unit of time is seconds. + +Do some experiments sequentially using the #bm method: + +```ruby +require 'benchmark' +n = 5000000 +Benchmark.bm do |x| + x.report { for i in 1..n; a = "1"; end } + x.report { n.times do ; a = "1"; end } + x.report { 1.upto(n) do ; a = "1"; end } +end +``` + +The result: + +``` + user system total real +1.010000 0.000000 1.010000 ( 1.014479) +1.000000 0.000000 1.000000 ( 0.998261) +0.980000 0.000000 0.980000 ( 0.981335) +``` + +Continuing the previous example, put a label in each report: + +```ruby +require 'benchmark' +n = 5000000 +Benchmark.bm(7) do |x| + x.report("for:") { for i in 1..n; a = "1"; end } + x.report("times:") { n.times do ; a = "1"; end } + x.report("upto:") { 1.upto(n) do ; a = "1"; end } +end +``` + +The result: + +``` + user system total real +for: 1.010000 0.000000 1.010000 ( 1.015688) +times: 1.000000 0.000000 1.000000 ( 1.003611) +upto: 1.030000 0.000000 1.030000 ( 1.028098) +``` + +The times for some benchmarks depend on the order in which items are run. These differences are due to the cost of memory allocation and garbage collection. To avoid these discrepancies, the #bmbm method is provided. For example, to compare ways to sort an array of floats: + +```ruby +require 'benchmark' +array = (1..1000000).map { rand } +Benchmark.bmbm do |x| + x.report("sort!") { array.dup.sort! } + x.report("sort") { array.dup.sort } +end +``` + +The result: + +``` +Rehearsal ----------------------------------------- +sort! 1.490000 0.010000 1.500000 ( 1.490520) +sort 1.460000 0.000000 1.460000 ( 1.463025) +-------------------------------- total: 2.960000sec + user system total real +sort! 1.460000 0.000000 1.460000 ( 1.460465) +sort 1.450000 0.010000 1.460000 ( 1.448327) +``` + +Report statistics of sequential experiments with unique labels, using the #benchmark method: + +```ruby +require 'benchmark' +include Benchmark # we need the CAPTION and FORMAT constants +n = 5000000 +Benchmark.benchmark(CAPTION, 7, FORMAT, ">total:", ">avg:") do |x| + tf = x.report("for:") { for i in 1..n; a = "1"; end } + tt = x.report("times:") { n.times do ; a = "1"; end } + tu = x.report("upto:") { 1.upto(n) do ; a = "1"; end } + [tf+tt+tu, (tf+tt+tu)/3] +end +``` + +The result: + +``` + user system total real +for: 0.950000 0.000000 0.950000 ( 0.952039) +times: 0.980000 0.000000 0.980000 ( 0.984938) +upto: 0.950000 0.000000 0.950000 ( 0.946787) +>total: 2.880000 0.000000 2.880000 ( 2.883764) +>avg: 0.960000 0.000000 0.960000 ( 0.961255) +``` + +## Development + +After checking out the repo, run `bin/setup` to install dependencies. Then, run `rake test` to run the tests. You can also run `bin/console` for an interactive prompt that will allow you to experiment. + +To install this gem onto your local machine, run `bundle exec rake install`. To release a new version, update the version number in `version.rb`, and then run `bundle exec rake release`, which will create a git tag for the version, push git commits and tags, and push the `.gem` file to [rubygems.org](https://rubygems.org). + +## Contributing + +Bug reports and pull requests are welcome on GitHub at https://github.com/ruby/benchmark. 
diff --git a/vendor/bundle/ruby/3.2.0/gems/benchmark-0.4.1/Rakefile b/vendor/bundle/ruby/3.2.0/gems/benchmark-0.4.1/Rakefile new file mode 100644 index 000000000..8830e057a --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/benchmark-0.4.1/Rakefile @@ -0,0 +1,8 @@ +require "bundler/gem_tasks" +require "rake/testtask" + +Rake::TestTask.new(:test) do |t| + t.test_files = FileList["test/**/test_*.rb"] +end + +task :default => :test diff --git a/vendor/bundle/ruby/3.2.0/gems/benchmark-0.4.1/benchmark.gemspec b/vendor/bundle/ruby/3.2.0/gems/benchmark-0.4.1/benchmark.gemspec new file mode 100644 index 000000000..35deff8d1 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/benchmark-0.4.1/benchmark.gemspec @@ -0,0 +1,32 @@ +name = File.basename(__FILE__, ".gemspec") +version = ["lib", Array.new(name.count("-")+1, ".").join("/")].find do |dir| + break File.foreach(File.join(__dir__, dir, "#{name.tr('-', '/')}.rb")) do |line| + /^\s*VERSION\s*=\s*"(.*)"/ =~ line and break $1 + end rescue nil +end + +Gem::Specification.new do |spec| + spec.name = name + spec.version = version + spec.authors = ["Yukihiro Matsumoto"] + spec.email = ["matz@ruby-lang.org"] + + spec.summary = %q{a performance benchmarking library} + spec.description = spec.summary + spec.homepage = "https://github.com/ruby/benchmark" + spec.licenses = ["Ruby", "BSD-2-Clause"] + + spec.required_ruby_version = ">= 2.1.0" + + spec.metadata["homepage_uri"] = spec.homepage + spec.metadata["source_code_uri"] = spec.homepage + + # Specify which files should be added to the gem when it is released. + # The `git ls-files -z` loads the files in the RubyGem that have been added into git. + spec.files = Dir.chdir(File.expand_path('..', __FILE__)) do + `git ls-files -z 2>#{IO::NULL}`.split("\x0").reject { |f| f.match(%r{^(test|spec|features)/}) } + end + spec.bindir = "exe" + spec.executables = [] + spec.require_paths = ["lib"] +end diff --git a/vendor/bundle/ruby/3.2.0/gems/benchmark-0.4.1/lib/benchmark.rb b/vendor/bundle/ruby/3.2.0/gems/benchmark-0.4.1/lib/benchmark.rb new file mode 100644 index 000000000..0d1b8df6c --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/benchmark-0.4.1/lib/benchmark.rb @@ -0,0 +1,595 @@ +# frozen_string_literal: true +#-- +# benchmark.rb - a performance benchmarking library +# +# $Id$ +# +# Created by Gotoken (gotoken@notwork.org). +# +# Documentation by Gotoken (original RD), Lyle Johnson (RDoc conversion), and +# Gavin Sinclair (editing). +#++ +# +# == Overview +# +# The Benchmark module provides methods for benchmarking Ruby code, giving +# detailed reports on the time taken for each task. +# + +# The Benchmark module provides methods to measure and report the time +# used to execute Ruby code. +# +# * Measure the time to construct the string given by the expression +# "a"*1_000_000_000: +# +# require 'benchmark' +# +# puts Benchmark.measure { "a"*1_000_000_000 } +# +# On my machine (OSX 10.8.3 on i5 1.7 GHz) this generates: +# +# 0.350000 0.400000 0.750000 ( 0.835234) +# +# This report shows the user CPU time, system CPU time, the total time +# (sum of user CPU time, system CPU time, children's user CPU time, +# and children's system CPU time), and the elapsed real time. The unit +# of time is seconds. 
+# +# * Do some experiments sequentially using the #bm method: +# +# require 'benchmark' +# +# n = 5000000 +# Benchmark.bm do |x| +# x.report { for i in 1..n; a = "1"; end } +# x.report { n.times do ; a = "1"; end } +# x.report { 1.upto(n) do ; a = "1"; end } +# end +# +# The result: +# +# user system total real +# 1.010000 0.000000 1.010000 ( 1.014479) +# 1.000000 0.000000 1.000000 ( 0.998261) +# 0.980000 0.000000 0.980000 ( 0.981335) +# +# * Continuing the previous example, put a label in each report: +# +# require 'benchmark' +# +# n = 5000000 +# Benchmark.bm(7) do |x| +# x.report("for:") { for i in 1..n; a = "1"; end } +# x.report("times:") { n.times do ; a = "1"; end } +# x.report("upto:") { 1.upto(n) do ; a = "1"; end } +# end +# +# The result: +# +# user system total real +# for: 1.010000 0.000000 1.010000 ( 1.015688) +# times: 1.000000 0.000000 1.000000 ( 1.003611) +# upto: 1.030000 0.000000 1.030000 ( 1.028098) +# +# * The times for some benchmarks depend on the order in which items +# are run. These differences are due to the cost of memory +# allocation and garbage collection. To avoid these discrepancies, +# the #bmbm method is provided. For example, to compare ways to +# sort an array of floats: +# +# require 'benchmark' +# +# array = (1..1000000).map { rand } +# +# Benchmark.bmbm do |x| +# x.report("sort!") { array.dup.sort! } +# x.report("sort") { array.dup.sort } +# end +# +# The result: +# +# Rehearsal ----------------------------------------- +# sort! 1.490000 0.010000 1.500000 ( 1.490520) +# sort 1.460000 0.000000 1.460000 ( 1.463025) +# -------------------------------- total: 2.960000sec +# +# user system total real +# sort! 1.460000 0.000000 1.460000 ( 1.460465) +# sort 1.450000 0.010000 1.460000 ( 1.448327) +# +# * Report statistics of sequential experiments with unique labels, +# using the #benchmark method: +# +# require 'benchmark' +# include Benchmark # we need the CAPTION and FORMAT constants +# +# n = 5000000 +# Benchmark.benchmark(CAPTION, 7, FORMAT, ">total:", ">avg:") do |x| +# tf = x.report("for:") { for i in 1..n; a = "1"; end } +# tt = x.report("times:") { n.times do ; a = "1"; end } +# tu = x.report("upto:") { 1.upto(n) do ; a = "1"; end } +# [tf+tt+tu, (tf+tt+tu)/3] +# end +# +# The result: +# +# user system total real +# for: 0.950000 0.000000 0.950000 ( 0.952039) +# times: 0.980000 0.000000 0.980000 ( 0.984938) +# upto: 0.950000 0.000000 0.950000 ( 0.946787) +# >total: 2.880000 0.000000 2.880000 ( 2.883764) +# >avg: 0.960000 0.000000 0.960000 ( 0.961255) + +module Benchmark + + VERSION = "0.4.1" + + BENCHMARK_VERSION = "2002-04-25" # :nodoc: + + # Invokes the block with a Benchmark::Report object, which + # may be used to collect and report on the results of individual + # benchmark tests. Reserves +label_width+ leading spaces for + # labels on each line. Prints +caption+ at the top of the + # report, and uses +format+ to format each line. + # (Note: +caption+ must contain a terminating newline character, + # see the default Benchmark::Tms::CAPTION for an example.) + # + # Returns an array of Benchmark::Tms objects. + # + # If the block returns an array of + # Benchmark::Tms objects, these will be used to format + # additional lines of output. If +labels+ parameter are + # given, these are used to label these extra lines. + # + # _Note_: Other methods provide a simpler interface to this one, and are + # suitable for nearly all benchmarking requirements. See the examples in + # Benchmark, and the #bm and #bmbm methods. 
+ # + # Example: + # + # require 'benchmark' + # include Benchmark # we need the CAPTION and FORMAT constants + # + # n = 5000000 + # Benchmark.benchmark(CAPTION, 7, FORMAT, ">total:", ">avg:") do |x| + # tf = x.report("for:") { for i in 1..n; a = "1"; end } + # tt = x.report("times:") { n.times do ; a = "1"; end } + # tu = x.report("upto:") { 1.upto(n) do ; a = "1"; end } + # [tf+tt+tu, (tf+tt+tu)/3] + # end + # + # Generates: + # + # user system total real + # for: 0.970000 0.000000 0.970000 ( 0.970493) + # times: 0.990000 0.000000 0.990000 ( 0.989542) + # upto: 0.970000 0.000000 0.970000 ( 0.972854) + # >total: 2.930000 0.000000 2.930000 ( 2.932889) + # >avg: 0.976667 0.000000 0.976667 ( 0.977630) + # + + def benchmark(caption = "", label_width = nil, format = nil, *labels) # :yield: report + sync = $stdout.sync + $stdout.sync = true + label_width ||= 0 + label_width += 1 + format ||= FORMAT + report = Report.new(label_width, format) + results = yield(report) + + print " " * report.width + caption unless caption.empty? + report.list.each { |i| + print i.label.to_s.ljust(report.width) + print i.format(report.format, *format) + } + + Array === results and results.grep(Tms).each {|t| + print((labels.shift || t.label || "").ljust(label_width), t.format(format)) + } + report.list + ensure + $stdout.sync = sync unless sync.nil? + end + + + # A simple interface to the #benchmark method, #bm generates sequential + # reports with labels. +label_width+ and +labels+ parameters have the same + # meaning as for #benchmark. + # + # require 'benchmark' + # + # n = 5000000 + # Benchmark.bm(7) do |x| + # x.report("for:") { for i in 1..n; a = "1"; end } + # x.report("times:") { n.times do ; a = "1"; end } + # x.report("upto:") { 1.upto(n) do ; a = "1"; end } + # end + # + # Generates: + # + # user system total real + # for: 0.960000 0.000000 0.960000 ( 0.957966) + # times: 0.960000 0.000000 0.960000 ( 0.960423) + # upto: 0.950000 0.000000 0.950000 ( 0.954864) + # + + def bm(label_width = 0, *labels, &blk) # :yield: report + benchmark(CAPTION, label_width, FORMAT, *labels, &blk) + end + + + # Sometimes benchmark results are skewed because code executed + # earlier encounters different garbage collection overheads than + # that run later. #bmbm attempts to minimize this effect by running + # the tests twice, the first time as a rehearsal in order to get the + # runtime environment stable, the second time for + # real. GC.start is executed before the start of each of + # the real timings; the cost of this is not included in the + # timings. In reality, though, there's only so much that #bmbm can + # do, and the results are not guaranteed to be isolated from garbage + # collection and other effects. + # + # Because #bmbm takes two passes through the tests, it can + # calculate the required label width. + # + # require 'benchmark' + # + # array = (1..1000000).map { rand } + # + # Benchmark.bmbm do |x| + # x.report("sort!") { array.dup.sort! } + # x.report("sort") { array.dup.sort } + # end + # + # Generates: + # + # Rehearsal ----------------------------------------- + # sort! 1.440000 0.010000 1.450000 ( 1.446833) + # sort 1.440000 0.000000 1.440000 ( 1.448257) + # -------------------------------- total: 2.890000sec + # + # user system total real + # sort! 1.460000 0.000000 1.460000 ( 1.458065) + # sort 1.450000 0.000000 1.450000 ( 1.455963) + # + # #bmbm yields a Benchmark::Job object and returns an array of + # Benchmark::Tms objects. 
+ # + def bmbm(width = 0) # :yield: job + job = Job.new(width) + yield(job) + width = job.width + 1 + sync = $stdout.sync + $stdout.sync = true + + # rehearsal + puts 'Rehearsal '.ljust(width+CAPTION.length,'-') + ets = job.list.inject(Tms.new) { |sum,(label,item)| + print label.ljust(width) + res = Benchmark.measure(&item) + print res.format + sum + res + }.format("total: %tsec") + print " #{ets}\n\n".rjust(width+CAPTION.length+2,'-') + + # take + print ' '*width + CAPTION + job.list.map { |label,item| + GC.start + print label.ljust(width) + Benchmark.measure(label, &item).tap { |res| print res } + } + ensure + $stdout.sync = sync unless sync.nil? + end + + # + # Returns the time used to execute the given block as a + # Benchmark::Tms object. Takes +label+ option. + # + # require 'benchmark' + # + # n = 1000000 + # + # time = Benchmark.measure do + # n.times { a = "1" } + # end + # puts time + # + # Generates: + # + # 0.220000 0.000000 0.220000 ( 0.227313) + # + def measure(label = "") # :yield: + t0, r0 = Process.times, Process.clock_gettime(Process::CLOCK_MONOTONIC) + yield + t1, r1 = Process.times, Process.clock_gettime(Process::CLOCK_MONOTONIC) + Benchmark::Tms.new(t1.utime - t0.utime, + t1.stime - t0.stime, + t1.cutime - t0.cutime, + t1.cstime - t0.cstime, + r1 - r0, + label) + end + + # + # Returns the elapsed real time used to execute the given block. + # The unit of time is seconds. + # + # Benchmark.realtime { "a" * 1_000_000_000 } + # #=> 0.5098029999935534 + # + def realtime # :yield: + r0 = Process.clock_gettime(Process::CLOCK_MONOTONIC) + yield + Process.clock_gettime(Process::CLOCK_MONOTONIC) - r0 + end + + module_function :benchmark, :measure, :realtime, :bm, :bmbm + + # + # A Job is a sequence of labelled blocks to be processed by the + # Benchmark.bmbm method. It is of little direct interest to the user. + # + class Job # :nodoc: + # + # Returns an initialized Job instance. + # Usually, one doesn't call this method directly, as new + # Job objects are created by the #bmbm method. + # +width+ is a initial value for the label offset used in formatting; + # the #bmbm method passes its +width+ argument to this constructor. + # + def initialize(width) + @width = width + @list = [] + end + + # + # Registers the given label and block pair in the job list. + # + def item(label = "", &blk) # :yield: + raise ArgumentError, "no block" unless block_given? + label = label.to_s + w = label.length + @width = w if @width < w + @list << [label, blk] + self + end + + alias report item + + # An array of 2-element arrays, consisting of label and block pairs. + attr_reader :list + + # Length of the widest label in the #list. + attr_reader :width + end + + # + # This class is used by the Benchmark.benchmark and Benchmark.bm methods. + # It is of little direct interest to the user. + # + class Report # :nodoc: + # + # Returns an initialized Report instance. + # Usually, one doesn't call this method directly, as new + # Report objects are created by the #benchmark and #bm methods. + # +width+ and +format+ are the label offset and + # format string used by Tms#format. + # + def initialize(width = 0, format = nil) + @width, @format, @list = width, format, [] + end + + # + # Prints the +label+ and measured time for the block, + # formatted by +format+. See Tms#format for the + # formatting rules. 
+ # + def item(label = "", *format, &blk) # :yield: + w = label.to_s.length + @width = w if @width < w + @list << res = Benchmark.measure(label, &blk) + res + end + + alias report item + + # An array of Benchmark::Tms objects representing each item. + attr_reader :width, :format, :list + end + + + + # + # A data object, representing the times associated with a benchmark + # measurement. + # + class Tms + + # Default caption, see also Benchmark::CAPTION + CAPTION = " user system total real\n" + + # Default format string, see also Benchmark::FORMAT + FORMAT = "%10.6u %10.6y %10.6t %10.6r\n" + + # User CPU time + attr_reader :utime + + # System CPU time + attr_reader :stime + + # User CPU time of children + attr_reader :cutime + + # System CPU time of children + attr_reader :cstime + + # Elapsed real time + attr_reader :real + + # Total time, that is +utime+ + +stime+ + +cutime+ + +cstime+ + attr_reader :total + + # Label + attr_reader :label + + # + # Returns an initialized Tms object which has + # +utime+ as the user CPU time, +stime+ as the system CPU time, + # +cutime+ as the children's user CPU time, +cstime+ as the children's + # system CPU time, +real+ as the elapsed real time and +label+ as the label. + # + def initialize(utime = 0.0, stime = 0.0, cutime = 0.0, cstime = 0.0, real = 0.0, label = nil) + @utime, @stime, @cutime, @cstime, @real, @label = utime, stime, cutime, cstime, real, label.to_s + @total = @utime + @stime + @cutime + @cstime + end + + # + # Returns a new Tms object whose times are the sum of the times for this + # Tms object, plus the time required to execute the code block (+blk+). + # + def add(&blk) # :yield: + self + Benchmark.measure(&blk) + end + + # + # An in-place version of #add. + # Changes the times of this Tms object by making it the sum of the times + # for this Tms object, plus the time required to execute + # the code block (+blk+). + # + def add!(&blk) + t = Benchmark.measure(&blk) + @utime = utime + t.utime + @stime = stime + t.stime + @cutime = cutime + t.cutime + @cstime = cstime + t.cstime + @real = real + t.real + self + end + + # + # Returns a new Tms object obtained by memberwise summation + # of the individual times for this Tms object with those of the +other+ + # Tms object. + # This method and #/() are useful for taking statistics. + # + def +(other); memberwise(:+, other) end + + # + # Returns a new Tms object obtained by memberwise subtraction + # of the individual times for the +other+ Tms object from those of this + # Tms object. + # + def -(other); memberwise(:-, other) end + + # + # Returns a new Tms object obtained by memberwise multiplication + # of the individual times for this Tms object by +x+. + # + def *(x); memberwise(:*, x) end + + # + # Returns a new Tms object obtained by memberwise division + # of the individual times for this Tms object by +x+. + # This method and #+() are useful for taking statistics. + # + def /(x); memberwise(:/, x) end + + # + # Returns the contents of this Tms object as + # a formatted string, according to a +format+ string + # like that passed to Kernel.format. In addition, #format + # accepts the following extensions: + # + # %u:: Replaced by the user CPU time, as reported by Tms#utime. 
+ # %y:: Replaced by the system CPU time, as reported by Tms#stime (Mnemonic: y of "s*y*stem") + # %U:: Replaced by the children's user CPU time, as reported by Tms#cutime + # %Y:: Replaced by the children's system CPU time, as reported by Tms#cstime + # %t:: Replaced by the total CPU time, as reported by Tms#total + # %r:: Replaced by the elapsed real time, as reported by Tms#real + # %n:: Replaced by the label string, as reported by Tms#label (Mnemonic: n of "*n*ame") + # + # If +format+ is not given, FORMAT is used as default value, detailing the + # user, system, total and real elapsed time. + # + def format(format = nil, *args) + str = (format || FORMAT).dup + str.gsub!(/(%[-+.\d]*)n/) { "#{$1}s" % label } + str.gsub!(/(%[-+.\d]*)u/) { "#{$1}f" % utime } + str.gsub!(/(%[-+.\d]*)y/) { "#{$1}f" % stime } + str.gsub!(/(%[-+.\d]*)U/) { "#{$1}f" % cutime } + str.gsub!(/(%[-+.\d]*)Y/) { "#{$1}f" % cstime } + str.gsub!(/(%[-+.\d]*)t/) { "#{$1}f" % total } + str.gsub!(/(%[-+.\d]*)r/) { "(#{$1}f)" % real } + format ? str % args : str + end + + # + # Same as #format. + # + def to_s + format + end + + # + # Returns a new 6-element array, consisting of the + # label, user CPU time, system CPU time, children's + # user CPU time, children's system CPU time and elapsed + # real time. + # + def to_a + [@label, @utime, @stime, @cutime, @cstime, @real] + end + + # + # Returns a hash containing the same data as `to_a`. + # + def to_h + { + label: @label, + utime: @utime, + stime: @stime, + cutime: @cutime, + cstime: @cstime, + real: @real + } + end + + protected + + # + # Returns a new Tms object obtained by memberwise operation +op+ + # of the individual times for this Tms object with those of the other + # Tms object (+x+). + # + # +op+ can be a mathematical operation such as +, -, + # *, / + # + def memberwise(op, x) + case x + when Benchmark::Tms + Benchmark::Tms.new(utime.__send__(op, x.utime), + stime.__send__(op, x.stime), + cutime.__send__(op, x.cutime), + cstime.__send__(op, x.cstime), + real.__send__(op, x.real) + ) + else + Benchmark::Tms.new(utime.__send__(op, x), + stime.__send__(op, x), + cutime.__send__(op, x), + cstime.__send__(op, x), + real.__send__(op, x) + ) + end + end + end + + # The default caption string (heading above the output times). + CAPTION = Benchmark::Tms::CAPTION + + # The default format string used to display times. See also Benchmark::Tms#format. + FORMAT = Benchmark::Tms::FORMAT +end diff --git a/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/.rspec b/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/.rspec new file mode 100644 index 000000000..53607ea52 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/.rspec @@ -0,0 +1 @@ +--colour diff --git a/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/CHANGELOG.md b/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/CHANGELOG.md new file mode 100644 index 000000000..c2881230e --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/CHANGELOG.md @@ -0,0 +1,518 @@ +# Changelog + +## 1.6.2 / 2025-05-12 + +- Handle upcoming changes to the `cgi` gem in Ruby 3.5 ([#147][pull-147]) + +- Fix issues found with `htmldiff` in Ruby 1.8 (which is used approximately + never, since the code change which broke Ruby 1.8 was made 6 years ago). + [#148][pull-148] + +- Fixed some standardrb formatting and configuration issues. + +## 1.6.1 / 2025-03-25 + +- Performed further work on `Diff::LCS::Ldiff` improvements ([#46][issue-46]) + and resolve several thread safety issues cleanly by making it a class. 
+ [#129][pull-129] + +- Restructure the project to be more consistent with the rest of the projects + that I manage. + +- Increased GitHub action security. + +- Added [trusted publishing][tp] for fully automated releases. + +## 1.6.0 / 2025-02-13 + +- Baptiste Courtois (@annih) has done significant work on making `bin/ldiff` + work better, contributing a number of issues and pull requests. These include: + + - Separation of command parsing from diff-generation in `Diff::LCS::Ldiff` + code extraction making it easier to use separately from the `bin/ldiff` + command in [#103][pull-103]. This partially resolves [#46][issue-46]. + + - Improvement of binary and empty file detection and tests in [#104][pull-104] + and [#105][pull-105]. This resolves issues [#100][issue-100], + [#102][issue-102]. + + - Various ldiff fixes for output [#101][pull-101] resolves issues + [#106][issue-106] (ldiff ed scripts are inverted), [#107][issue-107] (ldiff + hunk ranges are incorrect; regression or incorrect fix for [#60][issue-60]), + and [#95][issue-95]. + +- Patrick Linnane fixed various minor typos. [#93][pull-93] + +- Mark Young added a Changelog link to the RubyGems metadata. [#92][pull-92] + This has been modified to incorporate it into the README. + +- Updated the documentation on `Diff::LCS#lcs` to be clearer about the + requirements for object equality comparison. This resolves [#70][issue-70]. + +- Governance: + + Changes described here are effective 2024-12-31. + + - Update gem management details to use markdown files for everything, enabled + in part by [flavorjones/hoe-markdown][hoe-markdown]. Several files were + renamed to be more consistent with standard practices. + + - Updated security notes with an [age][age] public key rather than pointing to + Keybase.io and a PGP public key which I no longer use. The use of the + [Tidelift security contact][tidelift] is recommended over direct disclosure. + +## 1.5.1 / 2024-01-31 + +- Peter Goldstein updated CI configuration to add Ruby 3.1 and Masato Nakamura + added Ruby 3.2 and 3.3. [#82][pull-82], [#89][pull-89] + +- Switched to [standard ruby][standard ruby] formatting. + +- Justin Steele converted the licence file to Markdown. [#84][pull-84] + +- Updated the gem SPDX identifier for GPL 2.0 or later, resolving [#86][pull-86] + by Vit Ondruch. + +- Resolve a potential security issue with `ldiff` in its use of `IO.read` + instead of `File.read`. [#91][issue-91] + +- Added MFA authentication requirement for release to RubyGems. [#90][pull-90] + +- Added Dependabot management for actions and gems. [#90][pull-90] + +- Updated CodeQL configuration. [#90][pull-90] + +## 1.5.0 / 2021-12-23 + +- Updated the CI configuration and monkey-patch Hoe. + +- Kenichi Kamiya fixed a test configuration deprecation in SimpleCov. + [#69][pull-69] + +- Tien introduced several corrections and code improvements: + + - Removed an off-by-one error when calculating an index value by embracing + Ruby iteration properly. This had a side-effect of fixing a long-standing + bug in `#traverse_sequences` where the traversal would not be transitive. + That is, `LCS(s2, s1)` should produce a sequence that is transitive with + `LCS(s1, s2)` on traversal, and applying the diff computed from those + results would result in equivalent changes that could be played forward or + backward as appropriate. [#71][pull-71], [#75][pull-75] + + - The above fix resulted in a changed order of the longest common subsequence + when callbacks were applied. 
After analysis, it was determined that the + computed subsequence was _equivalent_ to the prior version, so the test was + updated. This also resulted in the clarification of documentation when + traversing the sub-sequences. [#79][pull-79] + + - An infinite loop case in the case where `Diff::LCS` would be included into + an enumerable class has been fixed. [#73][pull-73] + + - Clarified the purpose of a threshold test in calculation of LCS. + [#72][pull-72], [#80][pull-80] + +- Removed autotest directory + +## 1.4.4 / 2020-07-01 + +- Fixed an issue reported by Jun Aruga in the `Diff::LCS::Ldiff` binary text + detection. [#44][issue-44] + +- Fixed a theoretical issue reported by Jun Aruga in `Diff::LCS::Hunk` to raise + a more useful exception. [#43][issue-43] + +- Added documentation that should address custom object issues as reported in + [#35][issue-35]. + +- Fixed more diff errors, in part reported in [#65][issue-65]. + + - The use of `Numeric#abs` is incorrect in `Diff::LCS::Block#diff_size`. The + diff size _must_ be accurate for correct change placement. + + - When selecting `@max_diff_size` in `Diff::LCS::Hunk`, choose it based on + `block.diff_size.abs`. + + - Made a number of changes that will, unfortunately, increase allocations at + the cost of being safe with frozen strings. + + - Add some knowledge that when `Diff::LCS::Hunk#diff` is called, that we are + processing the _last_ hunk, so some changes will be made to how the output + is generated. + + - `old`, `ed`, and `reverse_ed` formats have no differences. + + - `unified` format will report `\ No newline at end of file` given the + correct conditions, at most once. Unified range reporting also differs for + the last hunk such that the `length` of the range is reduced by one. + + - `context` format will report `\No newline at end of file` given the + correct conditions, up to once per "file". Context range reporting also + differs for the last hunk such that the `end` part of the range is reduced + by one to a minimum of one. + +- Added a bunch more tests for the cases above, and fixed `hunk_spec.rb` so that + the phrase being compared isn't nonsense French. + +- Updated formatting. + +- Added a Rake task to assist with manual testing on Ruby 1.8. + +## 1.4.3 / 2020-06-29 + +- Fixed several issues with 1.4 on Rubies older than 2.0. Some of this was + providing useful shim functions to Hoe 3.x (which dropped these older Rubies a + while ago). Specifically: + + - Removed Array#lazy from a method in `Diff::LCS::Hunk`. + + - Changed some unit tests to use old-style Symbol-keyed hashes. + + - Changed some unit test helper functions to no longer use keyword parameters, + but only a trailing options hash. + + - Made the use of `psych` dependent on `RUBY_VERSION >= 1.9`. + + Resolves [#63][issue-63]. + +## 1.4.2 / 2020-06-23 + +- Camille Drapier fixed a small issue with RuboCop configuration. [#59][pull-59] + +- Applied another fix (and unit test) to fix an issue for the Chef team. + [#60][issue-60], [#61][pull-61] + +## 1.4.1 / 2020-06-23 + +- Fix an issue where diff sizes could be negative, and they should be. + [#57][issue-57], [#58][pull-58] + +## 1.4 / 2020-06-23 + +- Ruby versions lower than 2.4 are soft-deprecated and will not be run as part + of the CI process any longer. + +- Akinora MUSHA (knu) added the ability for `Diff::LCS::Change` objects to be + implicitly treated arrays. 
Originally provided as pull request [#47][pull-47], + but it introduced a number of test failures as documented in [#48][issue-48], + and remediation of `Diff::LCS` itself was introduced in [#49][pull-49]. + +- Resolved [#5][issue-05] with some tests comparing output from `system` calls + to `bin/ldiff` with some pre-generated output. Resolved [#6][issue-06] with + these tests. + +- Resolved a previously undetected `bin/ldiff` issue with `--context` output not + matching `diff --context` output. + +- Resolved an issue with later versions of Ruby not working with an `OptParse` + specification of `Numeric`; this has been changed to `Integer`. + +- Brandon Fish added TruffleRuby in [#52][pull-52]. + +- Fixed two missing classes as reported in [#53][issue-53]. + +## 1.3 / 2017-01-18 + +- Bugs fixed: + + - Fixed an error for `bin/ldiff --version`. Fixes issue [#21][issue-21]. + + - Force `Diff::LCS::Change` and `Diff::LCS::ContextChange` to only perform + equality comparisons against themselves. Provided by Kevin Mook in pull + request [#29][pull-29]. + + - Fix tab expansion in `htmldiff`, provided by Mark Friedgan in pull request + [#25][pull-25]. + + - Silence Ruby 2.4 `Fixnum` deprecation warnings. Fixes issue [#38][issue-38] + and pull request [#36][pull-36]. + + - Ensure that test dependencies are loaded properly. Fixes issue + [#33][issue-33] and pull request [#34][pull-34]. + + - Fix issue [#1][issue-01] with incorrect intuition of patch direction. + Tentative fix, but the previous failure cases pass now. + +- Tooling changes: + + - Added SimpleCov and Coveralls support. + + - Change the homepage (temporarily) to the GitHub repo. + + - Updated testing and gem infrastructure. + + - Modernized the specs. + +- Cleaned up documentation. + +- Added a Code of Conduct. + +## 1.2.5 / 2013-11-08 + +- Bugs fixed: + + - Comparing arrays flattened them too far, especially with `Diff::LCS.sdiff`. + Fixed by Josh Bronson in pull request [#23][pull-23]. + +## 1.2.4 / 2013-04-20 + +- Bugs fixed: + + - A bug was introduced after 1.1.3 when pruning common sequences at the start + of comparison. Paul Kunysch (@pck) fixed this in pull request + [#18][pull-18]. Thanks! + + - The Rubinius (1.9 mode) bug in [rubinius/rubinius#2268][rubinius#2268] has + been fixed by the Rubinius team two days after it was filed. Thanks for + fixing this so quickly! + +- Switching to Raggi's hoe-gemspec2 for gemspec generation. + +## 1.2.3 / 2013-04-11 + +- Bugs Fixed: + + - The new encoding detection for diff output generation (added in 1.2.2) + introduced a bug if the left side of the comparison was the empty set. + Originally found in [rspec/rspec-expectations#238][rspec-expectations#238] + and [rspec/rspec-expectations#239][rspec-expectations#239]. Jon Rowe + developed a reasonable heuristic (left side, right side, empty string + literal) to avoid this bug. + + - There is a known issue with Rubinius in 1.9 mode reported in + [rubinius/rubinius#2268][rubinius#2268] and demonstrated in the Travis CI + builds. For all other tested platforms, diff-lcs is considered stable. As + soon as a suitably small test-case can be created for the Rubinius team to + examine, this will be added to the Rubinius issue around this. + +## 1.2.2 / 2013-03-30 + +- Bugs Fixed: + + - `Diff::LCS::Hunk` could not properly generate a difference for comparison + sets that are not US-ASCII-compatible because of the use of literal regular + expressions and strings. 
Jon Rowe found this in + [rspec/rspec-expectations#219][rspec-expectations#219] and provided a first + pass implementation in pull request [#15][pull-15]. I've reworked it because + of test failures in Rubinius when running in Ruby 1.9 mode. This coerces the + added values to the encoding of the old dataset (as determined by the first + piece of the old dataset). + + - Adding Travis CI testing for Ruby 2.0. + +## 1.2.1 / 2013-02-09 + +- Bugs Fixed: + + - As seen in [rspec/rspec-expectations#200][rspec-expectations#200], the + release of `Diff::LCS` 1.2 introduced an unnecessary public API change to + `Diff::LCS::Hunk` (see the change at + [rspec/rspec-expectations@3d6fc82c][rspec-expectations@3d6fc82c] for + details). The new method name (and behaviour) is more correct, but I should + not have renamed the function or should have at least provided an alias. + This release restores `Diff::LCS::Hunk#unshift` as an alias to #merge. Note + that the old `#unshift` behaviour was incorrect and will not be restored. + +## 1.2.0 / 2013-01-21 + +- Minor Enhancements: + + - Added special case handling for `Diff::LCS.patch` so that it handles patches + that are empty or contain no changes. + + - Added two new methods (`#patch_me` and `#unpatch_me`) to the include-able + module. + +- Bugs Fixed: + + - Fixed issue [#1][issue-01] patch direction detection. + + - Resolved issue [#2][issue-02] by handling `string[string.size, 1]` properly + (it returns `""` not `nil`). + + - Michael Granger (ged) fixed an implementation error in `Diff::LCS::Change` + and added specs in pull request [#8][pull-08]. Thanks! + + - Made the code auto-testable. + + - Vít Ondruch (voxik) provided the latest version of the GPL2 license file in + pull request [#10][pull-10]. Thanks! + + - Fixed a documentation issue with the include-able versions of `#patch!` and + `#unpatch!` where they implied that they would replace the original value. + Given that `Diff::LCS.patch` always returns a copy, the documentation was + incorrect and has been corrected. To provide the behaviour that was + originally documented, two new methods were added to provide this behaviour. + Found by scooter-dangle in issue [#12][issue-12]. Thanks! + +- Code Style Changes: + + - Removed trailing spaces. + + - Calling class methods using `.` instead of `::`. + + - Vít Ondruch (voxik) removed unnecessary shebangs in pull request + [#9][pull-09]. Thanks! + + - Kenichi Kamiya (kachick) removed some warnings of an unused variable in + lucky pull request [#13][pull-13]. Thanks! + + - Embarked on a major refactoring to make the files a little more manageable + and understand the code on a deeper level. + + - Adding CI via Travis CI. + +## 1.1.3 / 2011-08-27 + +- Converted to 'hoe' for release. + +- Converted tests to RSpec 2. + +- Extracted the body of `htmldiff` into a class available from + `diff/lcs/htmldiff`. + +- Migrated development and issue tracking to GitHub. + +- Bugs fixed: + + - Eliminated the explicit use of RubyGems in both `bin/htmldiff` and + `bin/ldiff`. Resolves issue [#4][issue-04]. + + - Eliminated Ruby warnings. Resolves issue [#3][issue-03]. + +## 1.1.2 / 2004-10-20 + +- Fixed a problem reported by Mauricio Fernandez in `htmldiff`. + +## 1.1.1 / 2004-09-25 + +- Fixed bug #891 (Set returned from patch command does not contain last equal + part). + +- Fixed a problem with callback initialisation code (it assumed that all + callbacks passed as classes can be initialised; now, it rescues NoMethodError + in the event of private :new being called). 
+ +- Modified the non-initialisable callbacks to have a private `#new` method. + +- Moved `ldiff` core code to `Diff::LCS::Ldiff` (`diff/lcs/ldiff.rb`). + +## 1.1.0 + +- Eliminated the need for `Diff::LCS::Event` and removed it. + +- Added a contextual diff callback, `Diff::LCS::ContextDiffCallback`. + +- Implemented (un-)patching for standard diff callback output formats with both + `#diff` and `#sdiff`. + +- Extensive documentation changes. + +## 1.0.4 + +- Fixed a problem with `bin/ldiff` output, especially for unified format. + Newlines that should have been present weren't. + +- Changed the `.tar.gz` installer to generate Windows batch files if ones do not + exist already. Removed the existing batch files as they didn't work. + +## 1.0.3 + +- Fixed a problem with `#traverse_sequences` where the first difference from the + left sequence might not be appropriately captured. + +## 1.0.2 + +- Fixed an issue with `ldiff` not working because actions were changed from + symbols to strings. + +## 1.0.1 + +- Minor modifications to the `gemspec`, the `README`. + +- Renamed the diff program to `ldiff` (as well as the companion batch file) so + as to not collide with the standard diff program. + +- Fixed issues with RubyGems. Requires RubyGems > 0.6.1 or >= 0.6.1 with the + latest CVS version. + +## 1.0 + +- Initial release based mostly on Perl's Algorithm::Diff. + +[age]: https://github.com/FiloSottile/age +[hoe-halostatue]: https://github.com/halostatue/hoe-halostatue +[hoe-markdown]: https://github.com/flavorjones/hoe-markdown +[issue-01]: https://github.com/halostatue/diff-lcs/issues/1 +[issue-02]: https://github.com/halostatue/diff-lcs/issues/2 +[issue-03]: https://github.com/halostatue/diff-lcs/issues/3 +[issue-04]: https://github.com/halostatue/diff-lcs/issues/4 +[issue-05]: https://github.com/halostatue/diff-lcs/issues/5 +[issue-06]: https://github.com/halostatue/diff-lcs/issues/6 +[issue-12]: https://github.com/halostatue/diff-lcs/issues/12 +[issue-21]: https://github.com/halostatue/diff-lcs/issues/21 +[issue-33]: https://github.com/halostatue/diff-lcs/issues/33 +[issue-35]: https://github.com/halostatue/diff-lcs/issues/35 +[issue-38]: https://github.com/halostatue/diff-lcs/issues/38 +[issue-43]: https://github.com/halostatue/diff-lcs/issues/43 +[issue-44]: https://github.com/halostatue/diff-lcs/issues/44 +[issue-46]: https://github.com/halostatue/diff-lcs/issues/46 +[issue-48]: https://github.com/halostatue/diff-lcs/issues/48 +[issue-53]: https://github.com/halostatue/diff-lcs/issues/53 +[issue-57]: https://github.com/halostatue/diff-lcs/issues/57 +[issue-60]: https://github.com/halostatue/diff-lcs/issues/60 +[issue-63]: https://github.com/halostatue/diff-lcs/issues/63 +[issue-65]: https://github.com/halostatue/diff-lcs/issues/65 +[issue-70]: https://github.com/halostatue/diff-lcs/issues/70 +[issue-91]: https://github.com/halostatue/diff-lcs/issues/91 +[issue-95]: https://github.com/halostatue/diff-lcs/issues/95 +[issue-100]: https://github.com/halostatue/diff-lcs/issues/100 +[issue-102]: https://github.com/halostatue/diff-lcs/issues/102 +[issue-106]: https://github.com/halostatue/diff-lcs/issues/106 +[issue-107]: https://github.com/halostatue/diff-lcs/issues/107 +[pull-08]: https://github.com/halostatue/diff-lcs/pull/8 +[pull-09]: https://github.com/halostatue/diff-lcs/pull/9 +[pull-10]: https://github.com/halostatue/diff-lcs/pull/10 +[pull-13]: https://github.com/halostatue/diff-lcs/pull/13 +[pull-15]: https://github.com/halostatue/diff-lcs/pull/15 +[pull-18]: 
https://github.com/halostatue/diff-lcs/pull/18 +[pull-23]: https://github.com/halostatue/diff-lcs/pull/23 +[pull-25]: https://github.com/halostatue/diff-lcs/pull/25 +[pull-29]: https://github.com/halostatue/diff-lcs/pull/29 +[pull-34]: https://github.com/halostatue/diff-lcs/pull/34 +[pull-36]: https://github.com/halostatue/diff-lcs/pull/36 +[pull-47]: https://github.com/halostatue/diff-lcs/pull/47 +[pull-49]: https://github.com/halostatue/diff-lcs/pull/49 +[pull-52]: https://github.com/halostatue/diff-lcs/pull/52 +[pull-58]: https://github.com/halostatue/diff-lcs/pull/58 +[pull-59]: https://github.com/halostatue/diff-lcs/pull/59 +[pull-61]: https://github.com/halostatue/diff-lcs/pull/61 +[pull-69]: https://github.com/halostatue/diff-lcs/pull/69 +[pull-71]: https://github.com/halostatue/diff-lcs/pull/71 +[pull-72]: https://github.com/halostatue/diff-lcs/pull/72 +[pull-73]: https://github.com/halostatue/diff-lcs/pull/73 +[pull-75]: https://github.com/halostatue/diff-lcs/pull/75 +[pull-79]: https://github.com/halostatue/diff-lcs/pull/79 +[pull-80]: https://github.com/halostatue/diff-lcs/pull/80 +[pull-82]: https://github.com/halostatue/diff-lcs/pull/82 +[pull-84]: https://github.com/halostatue/diff-lcs/pull/84 +[pull-86]: https://github.com/halostatue/diff-lcs/pull/86 +[pull-89]: https://github.com/halostatue/diff-lcs/pull/89 +[pull-90]: https://github.com/halostatue/diff-lcs/pull/90 +[pull-92]: https://github.com/halostatue/diff-lcs/pull/92 +[pull-93]: https://github.com/halostatue/diff-lcs/pull/93 +[pull-101]: https://github.com/halostatue/diff-lcs/pull/101 +[pull-103]: https://github.com/halostatue/diff-lcs/pull/103 +[pull-104]: https://github.com/halostatue/diff-lcs/pull/104 +[pull-105]: https://github.com/halostatue/diff-lcs/pull/105 +[pull-129]: https://github.com/halostatue/diff-lcs/pull/129 +[pull-147]: https://github.com/halostatue/diff-lcs/pull/147 +[pull-148]: https://github.com/halostatue/diff-lcs/pull/148 +[rspec-expectations#200]: https://github.com/rspec/rspec-expectations/pull/200 +[rspec-expectations#219]: https://github.com/rspec/rspec-expectations/issues/219 +[rspec-expectations#238]: https://github.com/rspec/rspec-expectations/issues/238 +[rspec-expectations#239]: https://github.com/rspec/rspec-expectations/issues/239 +[rspec-expectations@3d6fc82c]: https://github.com/rspec/rspec-expectations/commit/3d6fc82c +[rubinius#2268]: https://github.com/rubinius/rubinius/issues/2268 +[standard ruby]: https://github.com/standardrb/standard +[tidelift]: https://tidelift.com/security +[tp]: https://guides.rubygems.org/trusted-publishing/ diff --git a/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/CODE_OF_CONDUCT.md b/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/CODE_OF_CONDUCT.md new file mode 100644 index 000000000..184b5fb3a --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/CODE_OF_CONDUCT.md @@ -0,0 +1,128 @@ +# Contributor Covenant Code of Conduct + +## Our Pledge + +We as members, contributors, and leaders pledge to make participation in our +community a harassment-free experience for everyone, regardless of age, body +size, visible or invisible disability, ethnicity, sex characteristics, gender +identity and expression, level of experience, education, socio-economic status, +nationality, personal appearance, race, caste, color, religion, or sexual +identity and orientation. + +We pledge to act and interact in ways that contribute to an open, welcoming, +diverse, inclusive, and healthy community. 
+ +## Our Standards + +Examples of behavior that contributes to a positive environment for our +community include: + +- Demonstrating empathy and kindness toward other people +- Being respectful of differing opinions, viewpoints, and experiences +- Giving and gracefully accepting constructive feedback +- Accepting responsibility and apologizing to those affected by our mistakes, + and learning from the experience +- Focusing on what is best not just for us as individuals, but for the overall + community + +Examples of unacceptable behavior include: + +- The use of sexualized language or imagery, and sexual attention or advances of + any kind +- Trolling, insulting or derogatory comments, and personal or political attacks +- Public or private harassment +- Publishing others' private information, such as a physical or email address, + without their explicit permission +- Other conduct which could reasonably be considered inappropriate in a + professional setting + +## Enforcement Responsibilities + +Community leaders are responsible for clarifying and enforcing our standards of +acceptable behavior and will take appropriate and fair corrective action in +response to any behavior that they deem inappropriate, threatening, offensive, +or harmful. + +Community leaders have the right and responsibility to remove, edit, or reject +comments, commits, code, wiki edits, issues, and other contributions that are +not aligned to this Code of Conduct, and will communicate reasons for moderation +decisions when appropriate. + +## Scope + +This Code of Conduct applies within all community spaces, and also applies when +an individual is officially representing the community in public spaces. +Examples of representing our community include using an official email address, +posting via an official social media account, or acting as an appointed +representative at an online or offline event. + +## Enforcement + +Instances of abusive, harassing, or otherwise unacceptable behavior may be +reported to the community leaders responsible for enforcement at [INSERT CONTACT +METHOD]. All complaints will be reviewed and investigated promptly and fairly. + +All community leaders are obligated to respect the privacy and security of the +reporter of any incident. + +## Enforcement Guidelines + +Community leaders will follow these Community Impact Guidelines in determining +the consequences for any action they deem in violation of this Code of Conduct: + +### 1. Correction + +**Community Impact**: Use of inappropriate language or other behavior deemed +unprofessional or unwelcome in the community. + +**Consequence**: A private, written warning from community leaders, providing +clarity around the nature of the violation and an explanation of why the +behavior was inappropriate. A public apology may be requested. + +### 2. Warning + +**Community Impact**: A violation through a single incident or series of +actions. + +**Consequence**: A warning with consequences for continued behavior. No +interaction with the people involved, including unsolicited interaction with +those enforcing the Code of Conduct, for a specified period of time. This +includes avoiding interactions in community spaces as well as external channels +like social media. Violating these terms may lead to a temporary or permanent +ban. + +### 3. Temporary Ban + +**Community Impact**: A serious violation of community standards, including +sustained inappropriate behavior. 
+
+**Consequence**: A temporary ban from any sort of interaction or public
+communication with the community for a specified period of time. No public or
+private interaction with the people involved, including unsolicited interaction
+with those enforcing the Code of Conduct, is allowed during this period.
+Violating these terms may lead to a permanent ban.
+
+### 4. Permanent Ban
+
+**Community Impact**: Demonstrating a pattern of violation of community
+standards, including sustained inappropriate behavior, harassment of an
+individual, or aggression toward or disparagement of classes of individuals.
+
+**Consequence**: A permanent ban from any sort of public interaction within the
+community.
+
+## Attribution
+
+This Code of Conduct is adapted from the [Contributor Covenant][homepage],
+version 2.1, available at
+<https://www.contributor-covenant.org/version/2/1/code_of_conduct.html>.
+
+Community Impact Guidelines were inspired by
+[Mozilla's code of conduct enforcement ladder][Mozilla CoC].
+
+For answers to common questions about this code of conduct, see the FAQ at
+<https://www.contributor-covenant.org/faq>. Translations are available at
+<https://www.contributor-covenant.org/translations>.
+
+[homepage]: https://www.contributor-covenant.org
+[Mozilla CoC]: https://github.com/mozilla/diversity
diff --git a/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/CONTRIBUTING.md b/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/CONTRIBUTING.md
new file mode 100644
index 000000000..4bcde4bdd
--- /dev/null
+++ b/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/CONTRIBUTING.md
@@ -0,0 +1,71 @@
+# Contributing
+
+Contribution to diff-lcs is encouraged in any form: a bug report, a feature
+request, or code contributions. There are a few DOs and DON'Ts for
+contributions.
+
+- DO:
+
+  - Keep the coding style that already exists for any updated Ruby code (support
+    or otherwise). I use [Standard Ruby][standardrb] for linting and formatting.
+
+  - Use thoughtfully-named topic branches for contributions. Rebase your commits
+    into logical chunks as necessary.
+
+  - Use [quality commit messages][qcm].
+
+  - Add your name or GitHub handle to `CONTRIBUTORS.md` and a record in the
+    `CHANGELOG.md` as a separate commit from your main change. (Follow the style
+    in the `CHANGELOG.md` and provide a link to your PR.)
+
+  - Add or update tests as appropriate for your change. The test suite is
+    written in [RSpec][rspec].
+
+  - Add or update documentation as appropriate for your change. The
+    documentation is RDoc; diff-lcs does not use extensions that may be present
+    in alternative documentation generators.
+
+- DO NOT:
+
+  - Modify `VERSION` in `lib/diff/lcs/version.rb`. When your patch is accepted
+    and a release is made, the version will be updated at that point.
+
+  - Modify `diff-lcs.gemspec`; it is a generated file. (You _may_ use
+    `rake gemspec` to regenerate it if your change involves metadata related to
+    the gem itself.)
+
+  - Modify the `Gemfile`.
+
+## Test Dependencies
+
+diff-lcs uses Ryan Davis's [Hoe][Hoe] to manage the release process, and it adds
+a number of rake tasks. You will mostly be interested in `rake`, which runs
+tests in the same way that `rake spec` does.
+
+To assist with the installation of the development dependencies for diff-lcs, I
+have provided a Gemfile pointing to the (generated) `diff-lcs.gemspec` file.
+This will permit you to use `bundle install` to install the dependencies.
+
+You can run tests with code coverage analysis by running `rake spec:coverage`.
+
+## Workflow
+
+Here's the most direct way to get your work merged into the project:
+
+- Fork the project.
+- Clone your fork (`git clone git://github.com//diff-lcs.git`). +- Create a topic branch to contain your change + (`git checkout -b my_awesome_feature`). +- Hack away, add tests. Not necessarily in that order. +- Make sure everything still passes by running `rake`. +- If necessary, rebase your commits into logical chunks, without errors. +- Push the branch up (`git push origin my_awesome_feature`). +- Create a pull request against halostatue/diff-lcs and describe what your + change does and the why you think it should be merged. + +[hoe]: https://github.com/seattlerb/hoe +[qcm]: http://tbaggery.com/2008/04/19/a-note-about-git-commit-messages.html +[release-gem]: https://github.com/rubygems/release-gem +[rspec]: http://rspec.info/documentation/ +[standardrb]: https://github.com/standardrb/standard diff --git a/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/CONTRIBUTORS.md b/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/CONTRIBUTORS.md new file mode 100644 index 000000000..9053019c4 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/CONTRIBUTORS.md @@ -0,0 +1,49 @@ +# Contributors + +- Austin Ziegler (@halostatue) created diff-lcs. + +Thanks to everyone else who has contributed to diff-lcs over the years: + +- @ginriki +- @joshbronson +- @kevinmook +- @mckaz +- Akinori Musha +- Artem Ignatyev +- Brandon Fish +- Baptiste Courtois (@annih) +- Camille Drapier +- Cédric Boutillier +- @earlopain +- Gregg Kellogg +- Jagdeep Singh +- Jason Gladish +- Jon Rowe +- Josef Strzibny +- Josep (@apuratepp) +- Josh Bronson +- Jun Aruga +- Justin Steele +- Kenichi Kamiya +- Kensuke Nagae +- Kevin Ansfield +- Koichi Ito +- Mark Friedgan +- Masato Nakamura +- Mark Young +- Michael Granger +- Myron Marston +- Nicolas Leger +- Oleg Orlov +- Patrick Linnane +- Paul Kunysch +- Pete Higgins +- Peter Goldstein +- Peter Wagenet +- Philippe Lafoucrière +- Ryan Lovelett +- Scott Steele +- Simon Courtois +- Tien (@tiendo1011) +- Tomas Jura +- Vít Ondruch diff --git a/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/LICENCE.md b/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/LICENCE.md new file mode 100644 index 000000000..c57c3f160 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/LICENCE.md @@ -0,0 +1,40 @@ +# Licence + +This software is available under three licenses: the GNU GPL version 2 (or at +your option, a later version), the Perl Artistic license, or the MIT license. +Note that my preference for licensing is the MIT license, but Algorithm::Diff +was dually originally licensed with the Perl Artistic and the GNU GPL ("the same +terms as Perl itself") and given that the Ruby implementation originally hewed +pretty closely to the Perl version, I must maintain the additional licensing +terms. + +- Copyright 2004–2025 Austin Ziegler and contributors. +- Adapted from Algorithm::Diff (Perl) by Ned Konz and a Smalltalk version by + Mario I. Wolczko. + +## MIT License + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of +the Software, and to permit persons to whom the Software is furnished to do so, +subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. 
+ +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS +FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR +COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER +IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + +## Perl Artistic License + +See the file docs/artistic.txt in the main distribution. + +## GNU GPL version 2 + +See the file docs/COPYING.txt in the main distribution. diff --git a/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/Manifest.txt b/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/Manifest.txt new file mode 100644 index 000000000..fe58c86cf --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/Manifest.txt @@ -0,0 +1,115 @@ +.rspec +CHANGELOG.md +CODE_OF_CONDUCT.md +CONTRIBUTING.md +CONTRIBUTORS.md +LICENCE.md +Manifest.txt +README.md +Rakefile +SECURITY.md +bin/htmldiff +bin/ldiff +docs/COPYING.txt +docs/artistic.txt +lib/diff-lcs.rb +lib/diff/lcs.rb +lib/diff/lcs/array.rb +lib/diff/lcs/backports.rb +lib/diff/lcs/block.rb +lib/diff/lcs/callbacks.rb +lib/diff/lcs/change.rb +lib/diff/lcs/htmldiff.rb +lib/diff/lcs/hunk.rb +lib/diff/lcs/internals.rb +lib/diff/lcs/ldiff.rb +lib/diff/lcs/string.rb +lib/diff/lcs/version.rb +mise.toml +spec/change_spec.rb +spec/diff_spec.rb +spec/fixtures/123_x +spec/fixtures/456_x +spec/fixtures/aX +spec/fixtures/bXaX +spec/fixtures/ds1.csv +spec/fixtures/ds2.csv +spec/fixtures/empty +spec/fixtures/file1.bin +spec/fixtures/file2.bin +spec/fixtures/four_lines +spec/fixtures/four_lines_with_missing_new_line +spec/fixtures/ldiff/diff.missing_new_line1-e +spec/fixtures/ldiff/diff.missing_new_line1-f +spec/fixtures/ldiff/diff.missing_new_line2-e +spec/fixtures/ldiff/diff.missing_new_line2-f +spec/fixtures/ldiff/error.diff.chef-e +spec/fixtures/ldiff/error.diff.chef-f +spec/fixtures/ldiff/error.diff.missing_new_line1-e +spec/fixtures/ldiff/error.diff.missing_new_line1-f +spec/fixtures/ldiff/error.diff.missing_new_line2-e +spec/fixtures/ldiff/error.diff.missing_new_line2-f +spec/fixtures/ldiff/output.diff +spec/fixtures/ldiff/output.diff-c +spec/fixtures/ldiff/output.diff-e +spec/fixtures/ldiff/output.diff-f +spec/fixtures/ldiff/output.diff-u +spec/fixtures/ldiff/output.diff.bin1 +spec/fixtures/ldiff/output.diff.bin1-c +spec/fixtures/ldiff/output.diff.bin1-e +spec/fixtures/ldiff/output.diff.bin1-f +spec/fixtures/ldiff/output.diff.bin1-u +spec/fixtures/ldiff/output.diff.bin2 +spec/fixtures/ldiff/output.diff.bin2-c +spec/fixtures/ldiff/output.diff.bin2-e +spec/fixtures/ldiff/output.diff.bin2-f +spec/fixtures/ldiff/output.diff.bin2-u +spec/fixtures/ldiff/output.diff.chef +spec/fixtures/ldiff/output.diff.chef-c +spec/fixtures/ldiff/output.diff.chef-e +spec/fixtures/ldiff/output.diff.chef-f +spec/fixtures/ldiff/output.diff.chef-u +spec/fixtures/ldiff/output.diff.chef2 +spec/fixtures/ldiff/output.diff.chef2-c +spec/fixtures/ldiff/output.diff.chef2-d +spec/fixtures/ldiff/output.diff.chef2-e +spec/fixtures/ldiff/output.diff.chef2-f +spec/fixtures/ldiff/output.diff.chef2-u +spec/fixtures/ldiff/output.diff.empty.vs.four_lines +spec/fixtures/ldiff/output.diff.empty.vs.four_lines-c +spec/fixtures/ldiff/output.diff.empty.vs.four_lines-e +spec/fixtures/ldiff/output.diff.empty.vs.four_lines-f +spec/fixtures/ldiff/output.diff.empty.vs.four_lines-u +spec/fixtures/ldiff/output.diff.four_lines.vs.empty 
+spec/fixtures/ldiff/output.diff.four_lines.vs.empty-c +spec/fixtures/ldiff/output.diff.four_lines.vs.empty-e +spec/fixtures/ldiff/output.diff.four_lines.vs.empty-f +spec/fixtures/ldiff/output.diff.four_lines.vs.empty-u +spec/fixtures/ldiff/output.diff.issue95_trailing_context +spec/fixtures/ldiff/output.diff.issue95_trailing_context-c +spec/fixtures/ldiff/output.diff.issue95_trailing_context-e +spec/fixtures/ldiff/output.diff.issue95_trailing_context-f +spec/fixtures/ldiff/output.diff.issue95_trailing_context-u +spec/fixtures/ldiff/output.diff.missing_new_line1 +spec/fixtures/ldiff/output.diff.missing_new_line1-c +spec/fixtures/ldiff/output.diff.missing_new_line1-e +spec/fixtures/ldiff/output.diff.missing_new_line1-f +spec/fixtures/ldiff/output.diff.missing_new_line1-u +spec/fixtures/ldiff/output.diff.missing_new_line2 +spec/fixtures/ldiff/output.diff.missing_new_line2-c +spec/fixtures/ldiff/output.diff.missing_new_line2-e +spec/fixtures/ldiff/output.diff.missing_new_line2-f +spec/fixtures/ldiff/output.diff.missing_new_line2-u +spec/fixtures/new-chef +spec/fixtures/new-chef2 +spec/fixtures/old-chef +spec/fixtures/old-chef2 +spec/hunk_spec.rb +spec/issues_spec.rb +spec/lcs_spec.rb +spec/ldiff_spec.rb +spec/patch_spec.rb +spec/sdiff_spec.rb +spec/spec_helper.rb +spec/traverse_balanced_spec.rb +spec/traverse_sequences_spec.rb diff --git a/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/README.md b/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/README.md new file mode 100644 index 000000000..65838036f --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/README.md @@ -0,0 +1,92 @@ +# Diff::LCS + +- home :: https://github.com/halostatue/diff-lcs +- changelog :: https://github.com/halostatue/diff-lcs/blob/main/CHANGELOG.md +- code :: https://github.com/halostatue/diff-lcs +- bugs :: https://github.com/halostatue/diff-lcs/issues +- rdoc :: http://rubydoc.info/github/halostatue/diff-lcs + + + + + +## Description + +Diff::LCS computes the difference between two Enumerable sequences using the +McIlroy-Hunt longest common subsequence (LCS) algorithm. It includes utilities +to create a simple HTML diff output format and a standard diff-like tool. + +This is release 1.6.1, providing a simple extension that allows for +Diff::LCS::Change objects to be treated implicitly as arrays and fixes a number +of formatting issues. + +Ruby versions below 2.5 are soft-deprecated, which means that older versions are +no longer part of the CI test suite. If any changes have been introduced that +break those versions, bug reports and patches will be accepted, but it will be +up to the reporter to verify any fixes prior to release. The next major release +will completely break compatibility. + +## Synopsis + +Using this module is quite simple. 
By default, Diff::LCS does not extend objects +with the Diff::LCS interface, but will be called as if it were a function: + +```ruby +require 'diff/lcs' + +seq1 = %w(a b c e h j l m n p) +seq2 = %w(b c d e f j k l m r s t) + +lcs = Diff::LCS.LCS(seq1, seq2) +diffs = Diff::LCS.diff(seq1, seq2) +sdiff = Diff::LCS.sdiff(seq1, seq2) +seq = Diff::LCS.traverse_sequences(seq1, seq2, callback_obj) +bal = Diff::LCS.traverse_balanced(seq1, seq2, callback_obj) +seq2 == Diff::LCS.patch!(seq1, diffs) +seq1 == Diff::LCS.unpatch!(seq2, diffs) +seq2 == Diff::LCS.patch!(seq1, sdiff) +seq1 == Diff::LCS.unpatch!(seq2, sdiff) +``` + +Objects can be extended with Diff::LCS: + +```ruby +seq1.extend(Diff::LCS) +lcs = seq1.lcs(seq2) +diffs = seq1.diff(seq2) +sdiff = seq1.sdiff(seq2) +seq = seq1.traverse_sequences(seq2, callback_obj) +bal = seq1.traverse_balanced(seq2, callback_obj) +seq2 == seq1.patch!(diffs) +seq1 == seq2.unpatch!(diffs) +seq2 == seq1.patch!(sdiff) +seq1 == seq2.unpatch!(sdiff) +``` + +By requiring 'diff/lcs/array' or 'diff/lcs/string', Array or String will be +extended for use this way. + +Note that Diff::LCS requires a sequenced enumerable container, which means that +the order of enumeration is both predictable and consistent for the same set of +data. While it is theoretically possible to generate a diff for an unordered +hash, it will only be meaningful if the enumeration of the hashes is consistent. +In general, this will mean that containers that behave like String or Array will +perform best. + +## History + +Diff::LCS is a port of Perl's Algorithm::Diff that uses the McIlroy-Hunt longest +common subsequence (LCS) algorithm to compute intelligent differences between +two sequenced enumerable containers. The implementation is based on Mario I. +Wolczko's [Smalltalk version 1.2][smalltalk] (1993) and Ned Konz's Perl version +[Algorithm::Diff 1.15][perl]. `Diff::LCS#sdiff` and +`Diff::LCS#traverse_balanced` were originally written for the Perl version by +Mike Schilli. + +The algorithm is described in A Fast Algorithm for Computing Longest Common +Subsequences, CACM, vol.20, no.5, pp.350-353, May 1977, with a few minor +improvements to improve the speed. A simplified description of the algorithm, +originally written for the Perl version, was written by Mark-Jason Dominus. 
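+
+As a quick illustrative sketch of what the algorithm described above produces
+(reusing the example sequences from the Synopsis), the longest common
+subsequence and the change hunks can be inspected directly:
+
+```ruby
+require 'diff/lcs'
+
+seq1 = %w(a b c e h j l m n p)
+seq2 = %w(b c d e f j k l m r s t)
+
+# The longest common subsequence of the two word lists.
+Diff::LCS.lcs(seq1, seq2) # => ["b", "c", "e", "j", "l", "m"]
+
+# Each hunk is an Array of Diff::LCS::Change objects describing one
+# contiguous run of additions and deletions.
+Diff::LCS.diff(seq1, seq2).each do |hunk|
+  hunk.each { |change| puts [change.action, change.position, change.element].inspect }
+end
+```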
+ +[smalltalk]: ftp://st.cs.uiuc.edu/pub/Smalltalk/MANCHESTER/manchester/4.0/diff.st +[perl]: http://search.cpan.org/~nedkonz/Algorithm-Diff-1.15/ diff --git a/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/Rakefile b/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/Rakefile new file mode 100644 index 000000000..0bfe927ba --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/Rakefile @@ -0,0 +1,115 @@ +require "rubygems" +require "rspec" +require "rspec/core/rake_task" +require "hoe" +require "rake/clean" + +MAINTENANCE = ENV["MAINTENANCE"] == "true" +BUILD_DOCS = MAINTENANCE || ENV["DOCS"] == "true" +TRUSTED_RELEASE = ENV["rubygems_release_gem"] == "true" + +Hoe.plugin :halostatue +Hoe.plugin :rubygems + +Hoe.plugins.delete :debug +Hoe.plugins.delete :newb +Hoe.plugins.delete :signing +Hoe.plugins.delete :publish unless BUILD_DOCS + +if RUBY_VERSION < "1.9" + class Array # :nodoc: + def to_h + Hash[*flatten(1)] + end + end + + class Gem::Specification # :nodoc: + def metadata=(*) + end + + def default_value(*) + end + end + + class Object # :nodoc: + def caller_locations(*) + [] + end + end +end + +_spec = Hoe.spec "diff-lcs" do + developer("Austin Ziegler", "halostatue@gmail.com") + + self.trusted_release = TRUSTED_RELEASE + + require_ruby_version ">= 1.8" + + self.history_file = "CHANGELOG.md" + self.readme_file = "README.md" + self.licenses = ["MIT", "Artistic-1.0-Perl", "GPL-2.0-or-later"] + + spec_extras[:metadata] = ->(val) { + val["rubygems_mfa_required"] = "true" + } + + extra_dev_deps << ["hoe", "~> 4.0"] + extra_dev_deps << ["hoe-halostatue", "~> 2.0"] + extra_dev_deps << ["hoe-rubygems", "~> 1.0"] + extra_dev_deps << ["rspec", ">= 2.0", "< 4"] + extra_dev_deps << ["rake", ">= 10.0", "< 14"] + extra_dev_deps << ["rdoc", ">= 6.3.1", "< 7"] +end + +if BUILD_DOCS + rake_tasks = Rake.application.instance_variable_get(:@tasks) + tasks = ["publish_docs", "publish_on_announce", "debug_email", "post_blog", "announce"] + tasks.each do |task| + rake_tasks.delete(task) + end +end + +desc "Run all specifications" +RSpec::Core::RakeTask.new(:spec) do |t| + rspec_dirs = %w[spec lib].join(":") + t.rspec_opts = ["-I#{rspec_dirs}"] +end + +task :version do + require "diff/lcs/version" + puts Diff::LCS::VERSION +end + +Rake::Task["spec"].actions.uniq! { |a| a.source_location } + +# standard:disable Style/HashSyntax +task :default => :spec unless Rake::Task["default"].prereqs.include?("spec") +task :test => :spec unless Rake::Task["test"].prereqs.include?("spec") +# standard:enable Style/HashSyntax + +if RUBY_VERSION >= "3.0" && RUBY_ENGINE == "ruby" + namespace :spec do + desc "Runs test coverage. Only works Ruby 2.0+ and assumes 'simplecov' is installed." + task :coverage do + ENV["COVERAGE"] = "true" + Rake::Task["spec"].execute + end + end +end + +if MAINTENANCE + task ruby18: :package do + require "diff/lcs/version" + # standard:disable Layout/HeredocIndentation + puts <<-MESSAGE +You are starting a barebones Ruby 1.8 docker environment for testing. 
+A snapshot package has been built, so install it with:
+
+    cd diff-lcs
+    gem install pkg/diff-lcs-#{Diff::LCS::VERSION}
+
+    MESSAGE
+    # standard:enable Layout/HeredocIndentation
+    sh "docker run -it --rm -v #{Dir.pwd}:/root/diff-lcs bellbind/docker-ruby18-rails2 bash -l"
+  end
+end
diff --git a/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/SECURITY.md b/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/SECURITY.md
new file mode 100644
index 000000000..16854f66a
--- /dev/null
+++ b/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/SECURITY.md
@@ -0,0 +1,41 @@
+# diff-lcs Security
+
+## Supported Versions
+
+Security reports are accepted for the most recent major release and the previous
+version for a limited time after the initial major release version. After a
+major release, the previous version will receive full support for six months and
+security support for an additional six months (for a total of twelve months).
+
+Because diff-lcs 1.x supports a wide range of Ruby versions, security reports
+will only be accepted when they can be demonstrated on Ruby 3.1 or higher.
+
+> [!information]
+>
+> There will be a diff-lcs 2.0 released in 2025 which narrows support to modern
+> versions of Ruby only.
+>
+> | Release Date | Support Ends | Security Support Ends |
+> | ------------ | ------------ | --------------------- |
+> | 2025         | +6 months    | +12 months            |
+>
+> If the 2.0.0 release happens on 2025-07-01, regular support for diff-lcs 1.x
+> will end on 2025-12-31 and security support for diff-lcs 1.x will end on
+> 2026-06-30.
+
+## Reporting a Vulnerability
+
+By preference, use the [Tidelift security contact][tidelift]. Tidelift will
+coordinate the fix and disclosure.
+
+Alternatively, send an email to [diff-lcs@halostatue.ca][email] with the text
+`Diff::LCS` in the subject. Emails sent to this address should be encrypted
+using [age][age] with the following public key:
+
+```
+age1fc6ngxmn02m62fej5cl30lrvwmxn4k3q2atqu53aatekmnqfwumqj4g93w
+```
+
+[tidelift]: https://tidelift.com/security
+[email]: mailto:diff-lcs@halostatue.ca
+[age]: https://github.com/FiloSottile/age
diff --git a/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/docs/COPYING.txt b/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/docs/COPYING.txt
new file mode 100644
index 000000000..d159169d1
--- /dev/null
+++ b/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/docs/COPYING.txt
@@ -0,0 +1,339 @@
+                    GNU GENERAL PUBLIC LICENSE
+                       Version 2, June 1991
+
+ Copyright (C) 1989, 1991 Free Software Foundation, Inc.,
+ 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
+ Everyone is permitted to copy and distribute verbatim copies
+ of this license document, but changing it is not allowed.
+
+                            Preamble
+
+  The licenses for most software are designed to take away your
+freedom to share and change it.  By contrast, the GNU General Public
+License is intended to guarantee your freedom to share and change free
+software--to make sure the software is free for all its users.  This
+General Public License applies to most of the Free Software
+Foundation's software and to any other program whose authors commit to
+using it.  (Some other Free Software Foundation software is covered by
+the GNU Lesser General Public License instead.)  You can apply it to
+your programs, too.
+
+  When we speak of free software, we are referring to freedom, not
+price.
Our General Public Licenses are designed to make sure that you +have the freedom to distribute copies of free software (and charge for +this service if you wish), that you receive source code or can get it +if you want it, that you can change the software or use pieces of it +in new free programs; and that you know you can do these things. + + To protect your rights, we need to make restrictions that forbid +anyone to deny you these rights or to ask you to surrender the rights. +These restrictions translate to certain responsibilities for you if you +distribute copies of the software, or if you modify it. + + For example, if you distribute copies of such a program, whether +gratis or for a fee, you must give the recipients all the rights that +you have. You must make sure that they, too, receive or can get the +source code. And you must show them these terms so they know their +rights. + + We protect your rights with two steps: (1) copyright the software, and +(2) offer you this license which gives you legal permission to copy, +distribute and/or modify the software. + + Also, for each author's protection and ours, we want to make certain +that everyone understands that there is no warranty for this free +software. If the software is modified by someone else and passed on, we +want its recipients to know that what they have is not the original, so +that any problems introduced by others will not reflect on the original +authors' reputations. + + Finally, any free program is threatened constantly by software +patents. We wish to avoid the danger that redistributors of a free +program will individually obtain patent licenses, in effect making the +program proprietary. To prevent this, we have made it clear that any +patent must be licensed for everyone's free use or not licensed at all. + + The precise terms and conditions for copying, distribution and +modification follow. + + GNU GENERAL PUBLIC LICENSE + TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION + + 0. This License applies to any program or other work which contains +a notice placed by the copyright holder saying it may be distributed +under the terms of this General Public License. The "Program", below, +refers to any such program or work, and a "work based on the Program" +means either the Program or any derivative work under copyright law: +that is to say, a work containing the Program or a portion of it, +either verbatim or with modifications and/or translated into another +language. (Hereinafter, translation is included without limitation in +the term "modification".) Each licensee is addressed as "you". + +Activities other than copying, distribution and modification are not +covered by this License; they are outside its scope. The act of +running the Program is not restricted, and the output from the Program +is covered only if its contents constitute a work based on the +Program (independent of having been made by running the Program). +Whether that is true depends on what the Program does. + + 1. You may copy and distribute verbatim copies of the Program's +source code as you receive it, in any medium, provided that you +conspicuously and appropriately publish on each copy an appropriate +copyright notice and disclaimer of warranty; keep intact all the +notices that refer to this License and to the absence of any warranty; +and give any other recipients of the Program a copy of this License +along with the Program. 
+ +You may charge a fee for the physical act of transferring a copy, and +you may at your option offer warranty protection in exchange for a fee. + + 2. You may modify your copy or copies of the Program or any portion +of it, thus forming a work based on the Program, and copy and +distribute such modifications or work under the terms of Section 1 +above, provided that you also meet all of these conditions: + + a) You must cause the modified files to carry prominent notices + stating that you changed the files and the date of any change. + + b) You must cause any work that you distribute or publish, that in + whole or in part contains or is derived from the Program or any + part thereof, to be licensed as a whole at no charge to all third + parties under the terms of this License. + + c) If the modified program normally reads commands interactively + when run, you must cause it, when started running for such + interactive use in the most ordinary way, to print or display an + announcement including an appropriate copyright notice and a + notice that there is no warranty (or else, saying that you provide + a warranty) and that users may redistribute the program under + these conditions, and telling the user how to view a copy of this + License. (Exception: if the Program itself is interactive but + does not normally print such an announcement, your work based on + the Program is not required to print an announcement.) + +These requirements apply to the modified work as a whole. If +identifiable sections of that work are not derived from the Program, +and can be reasonably considered independent and separate works in +themselves, then this License, and its terms, do not apply to those +sections when you distribute them as separate works. But when you +distribute the same sections as part of a whole which is a work based +on the Program, the distribution of the whole must be on the terms of +this License, whose permissions for other licensees extend to the +entire whole, and thus to each and every part regardless of who wrote it. + +Thus, it is not the intent of this section to claim rights or contest +your rights to work written entirely by you; rather, the intent is to +exercise the right to control the distribution of derivative or +collective works based on the Program. + +In addition, mere aggregation of another work not based on the Program +with the Program (or with a work based on the Program) on a volume of +a storage or distribution medium does not bring the other work under +the scope of this License. + + 3. You may copy and distribute the Program (or a work based on it, +under Section 2) in object code or executable form under the terms of +Sections 1 and 2 above provided that you also do one of the following: + + a) Accompany it with the complete corresponding machine-readable + source code, which must be distributed under the terms of Sections + 1 and 2 above on a medium customarily used for software interchange; or, + + b) Accompany it with a written offer, valid for at least three + years, to give any third party, for a charge no more than your + cost of physically performing source distribution, a complete + machine-readable copy of the corresponding source code, to be + distributed under the terms of Sections 1 and 2 above on a medium + customarily used for software interchange; or, + + c) Accompany it with the information you received as to the offer + to distribute corresponding source code. 
(This alternative is + allowed only for noncommercial distribution and only if you + received the program in object code or executable form with such + an offer, in accord with Subsection b above.) + +The source code for a work means the preferred form of the work for +making modifications to it. For an executable work, complete source +code means all the source code for all modules it contains, plus any +associated interface definition files, plus the scripts used to +control compilation and installation of the executable. However, as a +special exception, the source code distributed need not include +anything that is normally distributed (in either source or binary +form) with the major components (compiler, kernel, and so on) of the +operating system on which the executable runs, unless that component +itself accompanies the executable. + +If distribution of executable or object code is made by offering +access to copy from a designated place, then offering equivalent +access to copy the source code from the same place counts as +distribution of the source code, even though third parties are not +compelled to copy the source along with the object code. + + 4. You may not copy, modify, sublicense, or distribute the Program +except as expressly provided under this License. Any attempt +otherwise to copy, modify, sublicense or distribute the Program is +void, and will automatically terminate your rights under this License. +However, parties who have received copies, or rights, from you under +this License will not have their licenses terminated so long as such +parties remain in full compliance. + + 5. You are not required to accept this License, since you have not +signed it. However, nothing else grants you permission to modify or +distribute the Program or its derivative works. These actions are +prohibited by law if you do not accept this License. Therefore, by +modifying or distributing the Program (or any work based on the +Program), you indicate your acceptance of this License to do so, and +all its terms and conditions for copying, distributing or modifying +the Program or works based on it. + + 6. Each time you redistribute the Program (or any work based on the +Program), the recipient automatically receives a license from the +original licensor to copy, distribute or modify the Program subject to +these terms and conditions. You may not impose any further +restrictions on the recipients' exercise of the rights granted herein. +You are not responsible for enforcing compliance by third parties to +this License. + + 7. If, as a consequence of a court judgment or allegation of patent +infringement or for any other reason (not limited to patent issues), +conditions are imposed on you (whether by court order, agreement or +otherwise) that contradict the conditions of this License, they do not +excuse you from the conditions of this License. If you cannot +distribute so as to satisfy simultaneously your obligations under this +License and any other pertinent obligations, then as a consequence you +may not distribute the Program at all. For example, if a patent +license would not permit royalty-free redistribution of the Program by +all those who receive copies directly or indirectly through you, then +the only way you could satisfy both it and this License would be to +refrain entirely from distribution of the Program. 
+ +If any portion of this section is held invalid or unenforceable under +any particular circumstance, the balance of the section is intended to +apply and the section as a whole is intended to apply in other +circumstances. + +It is not the purpose of this section to induce you to infringe any +patents or other property right claims or to contest validity of any +such claims; this section has the sole purpose of protecting the +integrity of the free software distribution system, which is +implemented by public license practices. Many people have made +generous contributions to the wide range of software distributed +through that system in reliance on consistent application of that +system; it is up to the author/donor to decide if he or she is willing +to distribute software through any other system and a licensee cannot +impose that choice. + +This section is intended to make thoroughly clear what is believed to +be a consequence of the rest of this License. + + 8. If the distribution and/or use of the Program is restricted in +certain countries either by patents or by copyrighted interfaces, the +original copyright holder who places the Program under this License +may add an explicit geographical distribution limitation excluding +those countries, so that distribution is permitted only in or among +countries not thus excluded. In such case, this License incorporates +the limitation as if written in the body of this License. + + 9. The Free Software Foundation may publish revised and/or new versions +of the General Public License from time to time. Such new versions will +be similar in spirit to the present version, but may differ in detail to +address new problems or concerns. + +Each version is given a distinguishing version number. If the Program +specifies a version number of this License which applies to it and "any +later version", you have the option of following the terms and conditions +either of that version or of any later version published by the Free +Software Foundation. If the Program does not specify a version number of +this License, you may choose any version ever published by the Free Software +Foundation. + + 10. If you wish to incorporate parts of the Program into other free +programs whose distribution conditions are different, write to the author +to ask for permission. For software which is copyrighted by the Free +Software Foundation, write to the Free Software Foundation; we sometimes +make exceptions for this. Our decision will be guided by the two goals +of preserving the free status of all derivatives of our free software and +of promoting the sharing and reuse of software generally. + + NO WARRANTY + + 11. BECAUSE THE PROGRAM IS LICENSED FREE OF CHARGE, THERE IS NO WARRANTY +FOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW. EXCEPT WHEN +OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES +PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED +OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS +TO THE QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU. SHOULD THE +PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING, +REPAIR OR CORRECTION. + + 12. 
IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING +WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY AND/OR +REDISTRIBUTE THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, +INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING +OUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED +TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY +YOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER +PROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE +POSSIBILITY OF SUCH DAMAGES. + + END OF TERMS AND CONDITIONS + + How to Apply These Terms to Your New Programs + + If you develop a new program, and you want it to be of the greatest +possible use to the public, the best way to achieve this is to make it +free software which everyone can redistribute and change under these terms. + + To do so, attach the following notices to the program. It is safest +to attach them to the start of each source file to most effectively +convey the exclusion of warranty; and each file should have at least +the "copyright" line and a pointer to where the full notice is found. + + + Copyright (C) + + This program is free software; you can redistribute it and/or modify + it under the terms of the GNU General Public License as published by + the Free Software Foundation; either version 2 of the License, or + (at your option) any later version. + + This program is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + GNU General Public License for more details. + + You should have received a copy of the GNU General Public License along + with this program; if not, write to the Free Software Foundation, Inc., + 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. + +Also add information on how to contact you by electronic and paper mail. + +If the program is interactive, make it output a short notice like this +when it starts in an interactive mode: + + Gnomovision version 69, Copyright (C) year name of author + Gnomovision comes with ABSOLUTELY NO WARRANTY; for details type `show w'. + This is free software, and you are welcome to redistribute it + under certain conditions; type `show c' for details. + +The hypothetical commands `show w' and `show c' should show the appropriate +parts of the General Public License. Of course, the commands you use may +be called something other than `show w' and `show c'; they could even be +mouse-clicks or menu items--whatever suits your program. + +You should also get your employer (if you work as a programmer) or your +school, if any, to sign a "copyright disclaimer" for the program, if +necessary. Here is a sample; alter the names: + + Yoyodyne, Inc., hereby disclaims all copyright interest in the program + `Gnomovision' (which makes passes at compilers) written by James Hacker. + + , 1 April 1989 + Ty Coon, President of Vice + +This General Public License does not permit incorporating your program into +proprietary programs. If your program is a subroutine library, you may +consider it more useful to permit linking proprietary applications with the +library. If this is what you want to do, use the GNU Lesser General +Public License instead of this License. 
diff --git a/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/docs/artistic.txt b/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/docs/artistic.txt new file mode 100644 index 000000000..763e17a94 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/docs/artistic.txt @@ -0,0 +1,127 @@ +The "Artistic License" + + Preamble + +The intent of this document is to state the conditions under which a +Package may be copied, such that the Copyright Holder maintains some +semblance of artistic control over the development of the package, +while giving the users of the package the right to use and distribute +the Package in a more-or-less customary fashion, plus the right to make +reasonable modifications. + +Definitions: + + "Package" refers to the collection of files distributed by the + Copyright Holder, and derivatives of that collection of files + created through textual modification. + + "Standard Version" refers to such a Package if it has not been + modified, or has been modified in accordance with the wishes + of the Copyright Holder as specified below. + + "Copyright Holder" is whoever is named in the copyright or + copyrights for the package. + + "You" is you, if you're thinking about copying or distributing + this Package. + + "Reasonable copying fee" is whatever you can justify on the + basis of media cost, duplication charges, time of people involved, + and so on. (You will not be required to justify it to the + Copyright Holder, but only to the computing community at large + as a market that must bear the fee.) + + "Freely Available" means that no fee is charged for the item + itself, though there may be fees involved in handling the item. + It also means that recipients of the item may redistribute it + under the same conditions they received it. + +1. You may make and give away verbatim copies of the source form of the +Standard Version of this Package without restriction, provided that you +duplicate all of the original copyright notices and associated disclaimers. + +2. You may apply bug fixes, portability fixes and other modifications +derived from the Public Domain or from the Copyright Holder. A Package +modified in such a way shall still be considered the Standard Version. + +3. You may otherwise modify your copy of this Package in any way, provided +that you insert a prominent notice in each changed file stating how and +when you changed that file, and provided that you do at least ONE of the +following: + + a) place your modifications in the Public Domain or otherwise make them + Freely Available, such as by posting said modifications to Usenet or + an equivalent medium, or placing the modifications on a major archive + site such as uunet.uu.net, or by allowing the Copyright Holder to include + your modifications in the Standard Version of the Package. + + b) use the modified Package only within your corporation or organization. + + c) rename any non-standard executables so the names do not conflict + with standard executables, which must also be provided, and provide + a separate manual page for each non-standard executable that clearly + documents how it differs from the Standard Version. + + d) make other distribution arrangements with the Copyright Holder. + +4. 
You may distribute the programs of this Package in object code or +executable form, provided that you do at least ONE of the following: + + a) distribute a Standard Version of the executables and library files, + together with instructions (in the manual page or equivalent) on where + to get the Standard Version. + + b) accompany the distribution with the machine-readable source of + the Package with your modifications. + + c) give non-standard executables non-standard names, and clearly + document the differences in manual pages (or equivalent), together + with instructions on where to get the Standard Version. + + d) make other distribution arrangements with the Copyright Holder. + +5. You may charge a reasonable copying fee for any distribution of this +Package. You may charge any fee you choose for support of this +Package. You may not charge a fee for this Package itself. However, +you may distribute this Package in aggregate with other (possibly +commercial) programs as part of a larger (possibly commercial) software +distribution provided that you do not advertise this Package as a +product of your own. You may embed this Package's interpreter within +an executable of yours (by linking); this shall be construed as a mere +form of aggregation, provided that the complete Standard Version of the +interpreter is so embedded. + +6. The scripts and library files supplied as input to or produced as +output from the programs of this Package do not automatically fall +under the copyright of this Package, but belong to whoever generated +them, and may be sold commercially, and may be aggregated with this +Package. If such scripts or library files are aggregated with this +Package via the so-called "undump" or "unexec" methods of producing a +binary executable image, then distribution of such an image shall +neither be construed as a distribution of this Package nor shall it +fall under the restrictions of Paragraphs 3 and 4, provided that you do +not represent such an executable image as a Standard Version of this +Package. + +7. C subroutines (or comparably compiled subroutines in other +languages) supplied by you and linked into this Package in order to +emulate subroutines and variables of the language defined by this +Package shall not be considered part of this Package, but are the +equivalent of input as in Paragraph 6, provided these subroutines do +not change the language in any way that would cause it to fail the +regression tests for the language. + +8. Aggregation of this Package with a commercial distribution is always +permitted provided that the use of this Package is embedded; that is, +when no overt attempt is made to make this Package's interfaces visible +to the end user of the commercial distribution. Such use shall not be +construed as a distribution of this Package. + +9. The name of the Copyright Holder may not be used to endorse or promote +products derived from this software without specific prior written permission. + +10. THIS PACKAGE IS PROVIDED "AS IS" AND WITHOUT ANY EXPRESS OR +IMPLIED WARRANTIES, INCLUDING, WITHOUT LIMITATION, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. 
+ + The End diff --git a/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/lib/diff-lcs.rb b/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/lib/diff-lcs.rb new file mode 100644 index 000000000..bc07bf995 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/lib/diff-lcs.rb @@ -0,0 +1,3 @@ +# frozen_string_literal: true + +require "diff/lcs" diff --git a/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/lib/diff/lcs.rb b/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/lib/diff/lcs.rb new file mode 100644 index 000000000..5ee893779 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/lib/diff/lcs.rb @@ -0,0 +1,742 @@ +# frozen_string_literal: true + +module Diff; end unless defined? Diff + +# == How Diff Works (by Mark-Jason Dominus) +# +# I once read an article written by the authors of +diff+; they said that they +# hard worked very hard on the algorithm until they found the right one. +# +# I think what they ended up using (and I hope someone will correct me, because +# I am not very confident about this) was the `longest common subsequence' +# method. In the LCS problem, you have two sequences of items: +# +# a b c d f g h j q z +# a b c d e f g i j k r x y z +# +# and you want to find the longest sequence of items that is present in both +# original sequences in the same order. That is, you want to find a new +# sequence *S* which can be obtained from the first sequence by deleting some +# items, and from the second sequence by deleting other items. You also want +# *S* to be as long as possible. In this case *S* is: +# +# a b c d f g j z +# +# From there it's only a small step to get diff-like output: +# +# e h i k q r x y +# + - + + - + + + +# +# This module solves the LCS problem. It also includes a canned function to +# generate +diff+-like output. +# +# It might seem from the example above that the LCS of two sequences is always +# pretty obvious, but that's not always the case, especially when the two +# sequences have many repeated elements. For example, consider +# +# a x b y c z p d q +# a b c a x b y c z +# +# A naive approach might start by matching up the +a+ and +b+ that appear at +# the beginning of each sequence, like this: +# +# a x b y c z p d q +# a b c a b y c z +# +# This finds the common subsequence +a b c z+. But actually, the LCS is +a x b +# y c z+: +# +# a x b y c z p d q +# a b c a x b y c z +module Diff::LCS +end + +require "diff/lcs/version" +require "diff/lcs/callbacks" +require "diff/lcs/internals" + +module Diff::LCS + # Returns an Array containing the longest common subsequence(s) between + # +self+ and +other+. See Diff::LCS#lcs. + # + # lcs = seq1.lcs(seq2) + # + # A note when using objects: Diff::LCS only works properly when each object + # can be used as a key in a Hash. This means that those objects must implement + # the methods +#hash+ and +#eql?+ such that two objects containing identical values + # compare identically for key purposes. That is: + # + # O.new('a').eql?(O.new('a')) == true && + # O.new('a').hash == O.new('a').hash + def lcs(other, &block) # :yields: self[i] if there are matched subsequences + Diff::LCS.lcs(self, other, &block) + end + + # Returns the difference set between +self+ and +other+. See Diff::LCS#diff. + def diff(other, callbacks = nil, &block) + Diff::LCS.diff(self, other, callbacks, &block) + end + + # Returns the balanced ("side-by-side") difference set between +self+ and + # +other+. See Diff::LCS#sdiff. 
+ def sdiff(other, callbacks = nil, &block) + Diff::LCS.sdiff(self, other, callbacks, &block) + end + + # Traverses the discovered longest common subsequences between +self+ and + # +other+. See Diff::LCS#traverse_sequences. + def traverse_sequences(other, callbacks = nil, &block) + Diff::LCS.traverse_sequences(self, other, callbacks || Diff::LCS::SequenceCallbacks, &block) + end + + # Traverses the discovered longest common subsequences between +self+ and + # +other+ using the alternate, balanced algorithm. See + # Diff::LCS#traverse_balanced. + def traverse_balanced(other, callbacks = nil, &block) + Diff::LCS.traverse_balanced(self, other, callbacks || Diff::LCS::BalancedCallbacks, &block) + end + + # Attempts to patch +self+ with the provided +patchset+. A new sequence based + # on +self+ and the +patchset+ will be created. See Diff::LCS#patch. Attempts + # to autodiscover the direction of the patch. + def patch(patchset) + Diff::LCS.patch(self, patchset) + end + alias_method :unpatch, :patch + + # Attempts to patch +self+ with the provided +patchset+. A new sequence based + # on +self+ and the +patchset+ will be created. See Diff::LCS#patch. Does no + # patch direction autodiscovery. + def patch!(patchset) + Diff::LCS.patch!(self, patchset) + end + + # Attempts to unpatch +self+ with the provided +patchset+. A new sequence + # based on +self+ and the +patchset+ will be created. See Diff::LCS#unpatch. + # Does no patch direction autodiscovery. + def unpatch!(patchset) + Diff::LCS.unpatch!(self, patchset) + end + + # Attempts to patch +self+ with the provided +patchset+, using #patch!. If + # the sequence this is used on supports #replace, the value of +self+ will be + # replaced. See Diff::LCS#patch. Does no patch direction autodiscovery. + def patch_me(patchset) + if respond_to? :replace + replace(patch!(patchset)) + else + patch!(patchset) + end + end + + # Attempts to unpatch +self+ with the provided +patchset+, using #unpatch!. + # If the sequence this is used on supports #replace, the value of +self+ will + # be replaced. See Diff::LCS#unpatch. Does no patch direction autodiscovery. + def unpatch_me(patchset) + if respond_to? :replace + replace(unpatch!(patchset)) + else + unpatch!(patchset) + end + end +end + +class << Diff::LCS + def lcs(seq1, seq2, &block) # :yields: seq1[i] for each matched + matches = Diff::LCS::Internals.lcs(seq1, seq2) + ret = [] + string = seq1.is_a? String + matches.each_index do |i| + next if matches[i].nil? + + v = string ? seq1[i, 1] : seq1[i] + v = block[v] if block + ret << v + end + ret + end + alias_method :LCS, :lcs + + # #diff computes the smallest set of additions and deletions necessary to + # turn the first sequence into the second, and returns a description of these + # changes. + # + # See Diff::LCS::DiffCallbacks for the default behaviour. An alternate + # behaviour may be implemented with Diff::LCS::ContextDiffCallbacks. If a + # Class argument is provided for +callbacks+, #diff will attempt to + # initialise it. If the +callbacks+ object (possibly initialised) responds to + # #finish, it will be called. 
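  # A short illustrative sketch (not from the upstream documentation) of the
  # round trip the patch helpers rely on: hunks produced by #diff can be
  # applied with #patch! to rebuild the second sequence, or reversed with
  # #unpatch! to rebuild the first.
  #
  #   require "diff/lcs"
  #
  #   seq1  = %w(a b c e h j l m n p)
  #   seq2  = %w(b c d e f j k l m r s t)
  #   hunks = Diff::LCS.diff(seq1, seq2)
  #
  #   Diff::LCS.patch!(seq1, hunks) == seq2    # => true
  #   Diff::LCS.unpatch!(seq2, hunks) == seq1  # => true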
+ def diff(seq1, seq2, callbacks = nil, &block) # :yields: diff changes + diff_traversal(:diff, seq1, seq2, callbacks || Diff::LCS::DiffCallbacks, &block) + end + + # #sdiff computes all necessary components to show two sequences and their + # minimized differences side by side, just like the Unix utility + # sdiff does: + # + # old < - + # same same + # before | after + # - > new + # + # See Diff::LCS::SDiffCallbacks for the default behaviour. An alternate + # behaviour may be implemented with Diff::LCS::ContextDiffCallbacks. If a + # Class argument is provided for +callbacks+, #diff will attempt to + # initialise it. If the +callbacks+ object (possibly initialised) responds to + # #finish, it will be called. + # + # Each element of a returned array is a Diff::LCS::ContextChange object, + # which can be implicitly converted to an array. + # + # Diff::LCS.sdiff(a, b).each do |action, (old_pos, old_element), (new_pos, new_element)| + # case action + # when '!' + # # replace + # when '-' + # # delete + # when '+' + # # insert + # end + # end + def sdiff(seq1, seq2, callbacks = nil, &block) # :yields: diff changes + diff_traversal(:sdiff, seq1, seq2, callbacks || Diff::LCS::SDiffCallbacks, &block) + end + + # #traverse_sequences is the most general facility provided by this module; + # #diff and #lcs are implemented as calls to it. + # + # The arguments to #traverse_sequences are the two sequences to traverse, and + # a callback object, like this: + # + # traverse_sequences(seq1, seq2, Diff::LCS::ContextDiffCallbacks.new) + # + # == Callback Methods + # + # Optional callback methods are emphasized. + # + # callbacks#match:: Called when +a+ and +b+ are pointing to + # common elements in +A+ and +B+. + # callbacks#discard_a:: Called when +a+ is pointing to an + # element not in +B+. + # callbacks#discard_b:: Called when +b+ is pointing to an + # element not in +A+. + # callbacks#finished_a:: Called when +a+ has reached the end of + # sequence +A+. + # callbacks#finished_b:: Called when +b+ has reached the end of + # sequence +B+. + # + # == Algorithm + # + # a---+ + # v + # A = a b c e h j l m n p + # B = b c d e f j k l m r s t + # ^ + # b---+ + # + # If there are two arrows (+a+ and +b+) pointing to elements of sequences +A+ + # and +B+, the arrows will initially point to the first elements of their + # respective sequences. #traverse_sequences will advance the arrows through + # the sequences one element at a time, calling a method on the user-specified + # callback object before each advance. It will advance the arrows in such a + # way that if there are elements A[i] and B[j] which are + # both equal and part of the longest common subsequence, there will be some + # moment during the execution of #traverse_sequences when arrow +a+ is + # pointing to A[i] and arrow +b+ is pointing to B[j]. When + # this happens, #traverse_sequences will call callbacks#match and + # then it will advance both arrows. + # + # Otherwise, one of the arrows is pointing to an element of its sequence that + # is not part of the longest common subsequence. #traverse_sequences will + # advance that arrow and will call callbacks#discard_a or + # callbacks#discard_b, depending on which arrow it advanced. If both + # arrows point to elements that are not part of the longest common + # subsequence, then #traverse_sequences will advance arrow +a+ and call the + # appropriate callback, then it will advance arrow +b+ and call the appropriate + # callback. 
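  # As an illustrative sketch (not part of the upstream documentation), a
  # callback object only needs to respond to the methods it cares about; the
  # class name TraceCallbacks below is invented for this example.
  #
  #   class TraceCallbacks
  #     attr_reader :lines
  #
  #     def initialize
  #       @lines = []
  #     end
  #
  #     def match(event)
  #       @lines << "  #{event.old_element}"
  #     end
  #
  #     def discard_a(event)
  #       @lines << "- #{event.old_element}"
  #     end
  #
  #     def discard_b(event)
  #       @lines << "+ #{event.new_element}"
  #     end
  #   end
  #
  #   cb = TraceCallbacks.new
  #   Diff::LCS.traverse_sequences(%w(a b c), %w(a x c), cb)
  #   cb.lines # => ["  a", "- b", "+ x", "  c"]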
+ # + # The methods for callbacks#match, callbacks#discard_a, and + # callbacks#discard_b are invoked with an event comprising the + # action ("=", "+", or "-", respectively), the indexes +i+ and +j+, and the + # elements A[i] and B[j]. Return values are discarded by + # #traverse_sequences. + # + # === End of Sequences + # + # If arrow +a+ reaches the end of its sequence before arrow +b+ does, + # #traverse_sequence will try to call callbacks#finished_a with the + # last index and element of +A+ (A[-1]) and the current index and + # element of +B+ (B[j]). If callbacks#finished_a does not + # exist, then callbacks#discard_b will be called on each element of + # +B+ until the end of the sequence is reached (the call will be done with + # A[-1] and B[j] for each element). + # + # If +b+ reaches the end of +B+ before +a+ reaches the end of +A+, + # callbacks#finished_b will be called with the current index and + # element of +A+ (A[i]) and the last index and element of +B+ + # (A[-1]). Again, if callbacks#finished_b does not exist on + # the callback object, then callbacks#discard_a will be called on + # each element of +A+ until the end of the sequence is reached (A[i] + # and B[-1]). + # + # There is a chance that one additional callbacks#discard_a or + # callbacks#discard_b will be called after the end of the sequence + # is reached, if +a+ has not yet reached the end of +A+ or +b+ has not yet + # reached the end of +B+. + def traverse_sequences(seq1, seq2, callbacks = Diff::LCS::SequenceCallbacks) # :yields: change events + callbacks ||= Diff::LCS::SequenceCallbacks + matches = Diff::LCS::Internals.lcs(seq1, seq2) + + run_finished_a = run_finished_b = false + string = seq1.is_a?(String) + + a_size = seq1.size + b_size = seq2.size + ai = bj = 0 + + matches.each do |b_line| + if b_line.nil? + unless seq1[ai].nil? + ax = string ? seq1[ai, 1] : seq1[ai] + bx = string ? seq2[bj, 1] : seq2[bj] + + event = Diff::LCS::ContextChange.new("-", ai, ax, bj, bx) + event = yield event if block_given? + callbacks.discard_a(event) + end + else + ax = string ? seq1[ai, 1] : seq1[ai] + + loop do + break unless bj < b_line + + bx = string ? seq2[bj, 1] : seq2[bj] + event = Diff::LCS::ContextChange.new("+", ai, ax, bj, bx) + event = yield event if block_given? + callbacks.discard_b(event) + bj += 1 + end + bx = string ? seq2[bj, 1] : seq2[bj] + event = Diff::LCS::ContextChange.new("=", ai, ax, bj, bx) + event = yield event if block_given? + callbacks.match(event) + bj += 1 + end + ai += 1 + end + + # The last entry (if any) processed was a match. +ai+ and +bj+ point just + # past the last matching lines in their sequences. + while (ai < a_size) || (bj < b_size) + # last A? + if ai == a_size && bj < b_size + if callbacks.respond_to?(:finished_a) && !run_finished_a + ax = string ? seq1[-1, 1] : seq1[-1] + bx = string ? seq2[bj, 1] : seq2[bj] + event = Diff::LCS::ContextChange.new(">", a_size - 1, ax, bj, bx) + event = yield event if block_given? + callbacks.finished_a(event) + run_finished_a = true + else + ax = string ? seq1[ai, 1] : seq1[ai] + loop do + bx = string ? seq2[bj, 1] : seq2[bj] + event = Diff::LCS::ContextChange.new("+", ai, ax, bj, bx) + event = yield event if block_given? + callbacks.discard_b(event) + bj += 1 + break unless bj < b_size + end + end + end + + # last B? + if bj == b_size && ai < a_size + if callbacks.respond_to?(:finished_b) && !run_finished_b + ax = string ? seq1[ai, 1] : seq1[ai] + bx = string ? 
seq2[-1, 1] : seq2[-1] + event = Diff::LCS::ContextChange.new("<", ai, ax, b_size - 1, bx) + event = yield event if block_given? + callbacks.finished_b(event) + run_finished_b = true + else + bx = string ? seq2[bj, 1] : seq2[bj] + loop do + ax = string ? seq1[ai, 1] : seq1[ai] + event = Diff::LCS::ContextChange.new("-", ai, ax, bj, bx) + event = yield event if block_given? + callbacks.discard_a(event) + ai += 1 + break unless bj < b_size + end + end + end + + if ai < a_size + ax = string ? seq1[ai, 1] : seq1[ai] + bx = string ? seq2[bj, 1] : seq2[bj] + event = Diff::LCS::ContextChange.new("-", ai, ax, bj, bx) + event = yield event if block_given? + callbacks.discard_a(event) + ai += 1 + end + + if bj < b_size + ax = string ? seq1[ai, 1] : seq1[ai] + bx = string ? seq2[bj, 1] : seq2[bj] + event = Diff::LCS::ContextChange.new("+", ai, ax, bj, bx) + event = yield event if block_given? + callbacks.discard_b(event) + bj += 1 + end + end + end + + # #traverse_balanced is an alternative to #traverse_sequences. It uses a + # different algorithm to iterate through the entries in the computed longest + # common subsequence. Instead of viewing the changes as insertions or + # deletions from one of the sequences, #traverse_balanced will report + # changes between the sequences. + # + # The arguments to #traverse_balanced are the two sequences to traverse and a + # callback object, like this: + # + # traverse_balanced(seq1, seq2, Diff::LCS::ContextDiffCallbacks.new) + # + # #sdiff is implemented with #traverse_balanced. + # + # == Callback Methods + # + # Optional callback methods are emphasized. + # + # callbacks#match:: Called when +a+ and +b+ are pointing to + # common elements in +A+ and +B+. + # callbacks#discard_a:: Called when +a+ is pointing to an + # element not in +B+. + # callbacks#discard_b:: Called when +b+ is pointing to an + # element not in +A+. + # callbacks#change:: Called when +a+ and +b+ are pointing to + # the same relative position, but + # A[a] and B[b] are not + # the same; a change has + # occurred. + # + # #traverse_balanced might be a bit slower than #traverse_sequences, + # noticeable only while processing huge amounts of data. + # + # == Algorithm + # + # a---+ + # v + # A = a b c e h j l m n p + # B = b c d e f j k l m r s t + # ^ + # b---+ + # + # === Matches + # + # If there are two arrows (+a+ and +b+) pointing to elements of sequences +A+ + # and +B+, the arrows will initially point to the first elements of their + # respective sequences. #traverse_sequences will advance the arrows through + # the sequences one element at a time, calling a method on the user-specified + # callback object before each advance. It will advance the arrows in such a + # way that if there are elements A[i] and B[j] which are + # both equal and part of the longest common subsequence, there will be some + # moment during the execution of #traverse_sequences when arrow +a+ is + # pointing to A[i] and arrow +b+ is pointing to B[j]. When + # this happens, #traverse_sequences will call callbacks#match and + # then it will advance both arrows. + # + # === Discards + # + # Otherwise, one of the arrows is pointing to an element of its sequence that + # is not part of the longest common subsequence. #traverse_sequences will + # advance that arrow and will call callbacks#discard_a or + # callbacks#discard_b, depending on which arrow it advanced. 
+ # + # === Changes + # + # If both +a+ and +b+ point to elements that are not part of the longest + # common subsequence, then #traverse_sequences will try to call + # callbacks#change and advance both arrows. If + # callbacks#change is not implemented, then + # callbacks#discard_a and callbacks#discard_b will be + # called in turn. + # + # The methods for callbacks#match, callbacks#discard_a, + # callbacks#discard_b, and callbacks#change are invoked + # with an event comprising the action ("=", "+", "-", or "!", respectively), + # the indexes +i+ and +j+, and the elements A[i] and B[j]. + # Return values are discarded by #traverse_balanced. + # + # === Context + # + # Note that +i+ and +j+ may not be the same index position, even if +a+ and + # +b+ are considered to be pointing to matching or changed elements. + def traverse_balanced(seq1, seq2, callbacks = Diff::LCS::BalancedCallbacks) + matches = Diff::LCS::Internals.lcs(seq1, seq2) + a_size = seq1.size + b_size = seq2.size + ai = bj = mb = 0 + ma = -1 + string = seq1.is_a?(String) + + # Process all the lines in the match vector. + loop do + # Find next match indexes +ma+ and +mb+ + loop do + ma += 1 + break unless ma < matches.size && matches[ma].nil? + end + + break if ma >= matches.size # end of matches? + + mb = matches[ma] + + # Change(seq2) + while (ai < ma) || (bj < mb) + ax = string ? seq1[ai, 1] : seq1[ai] + bx = string ? seq2[bj, 1] : seq2[bj] + + case [(ai < ma), (bj < mb)] + when [true, true] + if callbacks.respond_to?(:change) + event = Diff::LCS::ContextChange.new("!", ai, ax, bj, bx) + event = yield event if block_given? + callbacks.change(event) + ai += 1 + else + event = Diff::LCS::ContextChange.new("-", ai, ax, bj, bx) + event = yield event if block_given? + callbacks.discard_a(event) + ai += 1 + ax = string ? seq1[ai, 1] : seq1[ai] + event = Diff::LCS::ContextChange.new("+", ai, ax, bj, bx) + event = yield event if block_given? + callbacks.discard_b(event) + end + + bj += 1 + when [true, false] + event = Diff::LCS::ContextChange.new("-", ai, ax, bj, bx) + event = yield event if block_given? + callbacks.discard_a(event) + ai += 1 + when [false, true] + event = Diff::LCS::ContextChange.new("+", ai, ax, bj, bx) + event = yield event if block_given? + callbacks.discard_b(event) + bj += 1 + end + end + + # Match + ax = string ? seq1[ai, 1] : seq1[ai] + bx = string ? seq2[bj, 1] : seq2[bj] + event = Diff::LCS::ContextChange.new("=", ai, ax, bj, bx) + event = yield event if block_given? + callbacks.match(event) + ai += 1 + bj += 1 + end + + while (ai < a_size) || (bj < b_size) + ax = string ? seq1[ai, 1] : seq1[ai] + bx = string ? seq2[bj, 1] : seq2[bj] + + case [(ai < a_size), (bj < b_size)] + when [true, true] + if callbacks.respond_to?(:change) + event = Diff::LCS::ContextChange.new("!", ai, ax, bj, bx) + event = yield event if block_given? + callbacks.change(event) + ai += 1 + else + event = Diff::LCS::ContextChange.new("-", ai, ax, bj, bx) + event = yield event if block_given? + callbacks.discard_a(event) + ai += 1 + ax = string ? seq1[ai, 1] : seq1[ai] + event = Diff::LCS::ContextChange.new("+", ai, ax, bj, bx) + event = yield event if block_given? + callbacks.discard_b(event) + end + + bj += 1 + when [true, false] + event = Diff::LCS::ContextChange.new("-", ai, ax, bj, bx) + event = yield event if block_given? + callbacks.discard_a(event) + ai += 1 + when [false, true] + event = Diff::LCS::ContextChange.new("+", ai, ax, bj, bx) + event = yield event if block_given? 
+ callbacks.discard_b(event) + bj += 1 + end + end + end + + # standard:disable Style/HashSyntax + PATCH_MAP = { # :nodoc: + :patch => {"+" => "+", "-" => "-", "!" => "!", "=" => "="}.freeze, + :unpatch => {"+" => "-", "-" => "+", "!" => "!", "=" => "="}.freeze + }.freeze + # standard:enable Style/HashSyntax + + # Applies a +patchset+ to the sequence +src+ according to the +direction+ + # (:patch or :unpatch), producing a new sequence. + # + # If the +direction+ is not specified, Diff::LCS::patch will attempt to + # discover the direction of the +patchset+. + # + # A +patchset+ can be considered to apply forward (:patch) if the + # following expression is true: + # + # patch(s1, diff(s1, s2)) -> s2 + # + # A +patchset+ can be considered to apply backward (:unpatch) if the + # following expression is true: + # + # patch(s2, diff(s1, s2)) -> s1 + # + # If the +patchset+ contains no changes, the +src+ value will be returned as + # either src.dup or +src+. A +patchset+ can be deemed as having no + # changes if the following predicate returns true: + # + # patchset.empty? or + # patchset.flatten(1).all? { |change| change.unchanged? } + # + # === Patchsets + # + # A +patchset+ is always an enumerable sequence of changes, hunks of changes, + # or a mix of the two. A hunk of changes is an enumerable sequence of + # changes: + # + # [ # patchset + # # change + # [ # hunk + # # change + # ] + # ] + # + # The +patch+ method accepts patchsets that are enumerable sequences + # containing either Diff::LCS::Change objects (or a subclass) or the array + # representations of those objects. Prior to application, array + # representations of Diff::LCS::Change objects will be reified. + def patch(src, patchset, direction = nil) + # Normalize the patchset. + has_changes, patchset = Diff::LCS::Internals.analyze_patchset(patchset) + + return src.respond_to?(:dup) ? src.dup : src unless has_changes + + string = src.is_a?(String) + # Start with a new empty type of the source's class + res = src.class.new + + direction ||= Diff::LCS::Internals.intuit_diff_direction(src, patchset) + + ai = bj = 0 + + patch_map = PATCH_MAP[direction] + + patchset.each do |change| + # Both Change and ContextChange support #action + action = patch_map[change.action] + + case change + when Diff::LCS::ContextChange + case direction + when :patch + el = change.new_element + op = change.old_position + np = change.new_position + when :unpatch + el = change.old_element + op = change.new_position + np = change.old_position + end + + case action + when "-" # Remove details from the old string + while ai < op + res << (string ? src[ai, 1] : src[ai]) + ai += 1 + bj += 1 + end + ai += 1 + when "+" + while bj < np + res << (string ? src[ai, 1] : src[ai]) + ai += 1 + bj += 1 + end + + res << el + bj += 1 + when "=" + # This only appears in sdiff output with the SDiff callback. + # Therefore, we only need to worry about dealing with a single + # element. + res << el + + ai += 1 + bj += 1 + when "!" + while ai < op + res << (string ? src[ai, 1] : src[ai]) + ai += 1 + bj += 1 + end + + bj += 1 + ai += 1 + + res << el + end + when Diff::LCS::Change + case action + when "-" + while ai < change.position + res << (string ? src[ai, 1] : src[ai]) + ai += 1 + bj += 1 + end + ai += 1 + when "+" + while bj < change.position + res << (string ? src[ai, 1] : src[ai]) + ai += 1 + bj += 1 + end + + bj += 1 + + res << change.element + end + end + end + + while ai < src.size + res << (string ? 
src[ai, 1] : src[ai]) + ai += 1 + bj += 1 + end + + res + end + + # Given a set of patchset, convert the current version to the prior version. + # Does no auto-discovery. + def unpatch!(src, patchset) + patch(src, patchset, :unpatch) + end + + # Given a set of patchset, convert the current version to the next version. + # Does no auto-discovery. + def patch!(src, patchset) + patch(src, patchset, :patch) + end +end + +require "diff/lcs/backports" diff --git a/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/lib/diff/lcs/array.rb b/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/lib/diff/lcs/array.rb new file mode 100644 index 000000000..663918a2f --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/lib/diff/lcs/array.rb @@ -0,0 +1,7 @@ +# frozen_string_literal: true + +require "diff/lcs" + +class Array + include Diff::LCS +end diff --git a/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/lib/diff/lcs/backports.rb b/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/lib/diff/lcs/backports.rb new file mode 100644 index 000000000..6543c8a4b --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/lib/diff/lcs/backports.rb @@ -0,0 +1,13 @@ +# frozen_string_literal: true + +unless 0.respond_to?(:positive?) + class Fixnum # standard:disable Lint/UnifiedInteger + def positive? + self > 0 + end + + def negative? + self < 0 + end + end +end diff --git a/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/lib/diff/lcs/block.rb b/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/lib/diff/lcs/block.rb new file mode 100644 index 000000000..226ed6fa9 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/lib/diff/lcs/block.rb @@ -0,0 +1,37 @@ +# frozen_string_literal: true + +# A block is an operation removing, adding, or changing a group of items. +# Basically, this is just a list of changes, where each change adds or +# deletes a single item. Used by bin/ldiff. +class Diff::LCS::Block + attr_reader :changes, :insert, :remove + + def initialize(chunk) + @changes = [] + @insert = [] + @remove = [] + + chunk.each do |item| + @changes << item + @remove << item if item.deleting? + @insert << item if item.adding? + end + end + + def diff_size + @insert.size - @remove.size + end + + def op + case [@remove.empty?, @insert.empty?] + when [false, false] + "!" + when [false, true] + "-" + when [true, false] + "+" + else # [true, true] + "^" + end + end +end diff --git a/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/lib/diff/lcs/callbacks.rb b/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/lib/diff/lcs/callbacks.rb new file mode 100644 index 000000000..2c5a77912 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/lib/diff/lcs/callbacks.rb @@ -0,0 +1,327 @@ +# frozen_string_literal: true + +require "diff/lcs/change" + +module Diff::LCS + # This callback object implements the default set of callback events, + # which only returns the event itself. Note that #finished_a and + # #finished_b are not implemented -- I haven't yet figured out where they + # would be useful. + # + # Note that this is intended to be called as is, e.g., + # + # Diff::LCS.LCS(seq1, seq2, Diff::LCS::DefaultCallbacks) + class DefaultCallbacks + class << self + # Called when two items match. + def match(event) + event + end + + # Called when the old value is discarded in favour of the new value. + def discard_a(event) + event + end + + # Called when the new value is discarded in favour of the old value. + def discard_b(event) + event + end + + # Called when both the old and new values have changed. 
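  # An illustrative sketch (not upstream documentation): because these
  # callbacks simply return each event, Diff::LCS.traverse_balanced with
  # BalancedCallbacks (an alias of this class) is mostly useful together with
  # a block, which receives every event in turn.
  #
  #   actions = []
  #   Diff::LCS.traverse_balanced(%w(a b c), %w(a x c)) do |event|
  #     actions << event.action
  #     event # return the event unchanged so the traversal keeps using it
  #   end
  #   actions # => ["=", "!", "="]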
+ def change(event) + event + end + + private :new + end + end + + # An alias for DefaultCallbacks that is used in + # Diff::LCS#traverse_sequences. + # + # Diff::LCS.LCS(seq1, seq2, Diff::LCS::SequenceCallbacks) + SequenceCallbacks = DefaultCallbacks + + # An alias for DefaultCallbacks that is used in + # Diff::LCS#traverse_balanced. + # + # Diff::LCS.LCS(seq1, seq2, Diff::LCS::BalancedCallbacks) + BalancedCallbacks = DefaultCallbacks + + def self.callbacks_for(callbacks) + callbacks.new + rescue + callbacks + end +end + +# This will produce a compound array of simple diff change objects. Each +# element in the #diffs array is a +hunk+ or +hunk+ array, where each +# element in each +hunk+ array is a single Change object representing the +# addition or removal of a single element from one of the two tested +# sequences. The +hunk+ provides the full context for the changes. +# +# diffs = Diff::LCS.diff(seq1, seq2) +# # This example shows a simplified array format. +# # [ [ [ '-', 0, 'a' ] ], # 1 +# # [ [ '+', 2, 'd' ] ], # 2 +# # [ [ '-', 4, 'h' ], # 3 +# # [ '+', 4, 'f' ] ], +# # [ [ '+', 6, 'k' ] ], # 4 +# # [ [ '-', 8, 'n' ], # 5 +# # [ '-', 9, 'p' ], +# # [ '+', 9, 'r' ], +# # [ '+', 10, 's' ], +# # [ '+', 11, 't' ] ] ] +# +# There are five hunks here. The first hunk says that the +a+ at position 0 +# of the first sequence should be deleted ('-'). The second hunk +# says that the +d+ at position 2 of the second sequence should be inserted +# ('+'). The third hunk says that the +h+ at position 4 of the +# first sequence should be removed and replaced with the +f+ from position 4 +# of the second sequence. The other two hunks are described similarly. +# +# === Use +# +# This callback object must be initialised and is used by the Diff::LCS#diff +# method. +# +# cbo = Diff::LCS::DiffCallbacks.new +# Diff::LCS.LCS(seq1, seq2, cbo) +# cbo.finish +# +# Note that the call to #finish is absolutely necessary, or the last set of +# changes will not be visible. Alternatively, can be used as: +# +# cbo = Diff::LCS::DiffCallbacks.new { |tcbo| Diff::LCS.LCS(seq1, seq2, tcbo) } +# +# The necessary #finish call will be made. +# +# === Simplified Array Format +# +# The simplified array format used in the example above can be obtained +# with: +# +# require 'pp' +# pp diffs.map { |e| e.map { |f| f.to_a } } +class Diff::LCS::DiffCallbacks + # Returns the difference set collected during the diff process. + attr_reader :diffs + + def initialize # :yields: self + @hunk = [] + @diffs = [] + + return unless block_given? + + begin + yield self + ensure + finish + end + end + + # Finalizes the diff process. If an unprocessed hunk still exists, then it + # is appended to the diff list. + def finish + finish_hunk + end + + def match(_event) + finish_hunk + end + + def discard_a(event) + @hunk << Diff::LCS::Change.new("-", event.old_position, event.old_element) + end + + def discard_b(event) + @hunk << Diff::LCS::Change.new("+", event.new_position, event.new_element) + end + + def finish_hunk + @diffs << @hunk unless @hunk.empty? + @hunk = [] + end + private :finish_hunk +end + +# This will produce a compound array of contextual diff change objects. Each +# element in the #diffs array is a "hunk" array, where each element in each +# "hunk" array is a single change. Each change is a Diff::LCS::ContextChange +# that contains both the old index and new index values for the change. The +# "hunk" provides the full context for the changes. Both old and new objects +# will be presented for changed objects. 
+nil+ will be substituted for a +# discarded object. +# +# seq1 = %w(a b c e h j l m n p) +# seq2 = %w(b c d e f j k l m r s t) +# +# diffs = Diff::LCS.diff(seq1, seq2, Diff::LCS::ContextDiffCallbacks) +# # This example shows a simplified array format. +# # [ [ [ '-', [ 0, 'a' ], [ 0, nil ] ] ], # 1 +# # [ [ '+', [ 3, nil ], [ 2, 'd' ] ] ], # 2 +# # [ [ '-', [ 4, 'h' ], [ 4, nil ] ], # 3 +# # [ '+', [ 5, nil ], [ 4, 'f' ] ] ], +# # [ [ '+', [ 6, nil ], [ 6, 'k' ] ] ], # 4 +# # [ [ '-', [ 8, 'n' ], [ 9, nil ] ], # 5 +# # [ '+', [ 9, nil ], [ 9, 'r' ] ], +# # [ '-', [ 9, 'p' ], [ 10, nil ] ], +# # [ '+', [ 10, nil ], [ 10, 's' ] ], +# # [ '+', [ 10, nil ], [ 11, 't' ] ] ] ] +# +# The five hunks shown are comprised of individual changes; if there is a +# related set of changes, they are still shown individually. +# +# This callback can also be used with Diff::LCS#sdiff, which will produce +# results like: +# +# diffs = Diff::LCS.sdiff(seq1, seq2, Diff::LCS::ContextCallbacks) +# # This example shows a simplified array format. +# # [ [ [ "-", [ 0, "a" ], [ 0, nil ] ] ], # 1 +# # [ [ "+", [ 3, nil ], [ 2, "d" ] ] ], # 2 +# # [ [ "!", [ 4, "h" ], [ 4, "f" ] ] ], # 3 +# # [ [ "+", [ 6, nil ], [ 6, "k" ] ] ], # 4 +# # [ [ "!", [ 8, "n" ], [ 9, "r" ] ], # 5 +# # [ "!", [ 9, "p" ], [ 10, "s" ] ], +# # [ "+", [ 10, nil ], [ 11, "t" ] ] ] ] +# +# The five hunks are still present, but are significantly shorter in total +# presentation, because changed items are shown as changes ("!") instead of +# potentially "mismatched" pairs of additions and deletions. +# +# The result of this operation is similar to that of +# Diff::LCS::SDiffCallbacks. They may be compared as: +# +# s = Diff::LCS.sdiff(seq1, seq2).reject { |e| e.action == "=" } +# c = Diff::LCS.sdiff(seq1, seq2, Diff::LCS::ContextDiffCallbacks).flatten(1) +# +# s == c # -> true +# +# === Use +# +# This callback object must be initialised and can be used by the +# Diff::LCS#diff or Diff::LCS#sdiff methods. +# +# cbo = Diff::LCS::ContextDiffCallbacks.new +# Diff::LCS.LCS(seq1, seq2, cbo) +# cbo.finish +# +# Note that the call to #finish is absolutely necessary, or the last set of +# changes will not be visible. Alternatively, can be used as: +# +# cbo = Diff::LCS::ContextDiffCallbacks.new { |tcbo| Diff::LCS.LCS(seq1, seq2, tcbo) } +# +# The necessary #finish call will be made. +# +# === Simplified Array Format +# +# The simplified array format used in the example above can be obtained +# with: +# +# require 'pp' +# pp diffs.map { |e| e.map { |f| f.to_a } } +class Diff::LCS::ContextDiffCallbacks < Diff::LCS::DiffCallbacks + def discard_a(event) + @hunk << Diff::LCS::ContextChange.simplify(event) + end + + def discard_b(event) + @hunk << Diff::LCS::ContextChange.simplify(event) + end + + def change(event) + @hunk << Diff::LCS::ContextChange.simplify(event) + end +end + +# This will produce a simple array of diff change objects. Each element in +# the #diffs array is a single ContextChange. In the set of #diffs provided +# by SDiffCallbacks, both old and new objects will be presented for both +# changed and unchanged objects. +nil+ will be substituted +# for a discarded object. +# +# The diffset produced by this callback, when provided to Diff::LCS#sdiff, +# will compute and display the necessary components to show two sequences +# and their minimized differences side by side, just like the Unix utility +# +sdiff+. 
+# +# same same +# before | after +# old < - +# - > new +# +# seq1 = %w(a b c e h j l m n p) +# seq2 = %w(b c d e f j k l m r s t) +# +# diffs = Diff::LCS.sdiff(seq1, seq2) +# # This example shows a simplified array format. +# # [ [ "-", [ 0, "a"], [ 0, nil ] ], +# # [ "=", [ 1, "b"], [ 0, "b" ] ], +# # [ "=", [ 2, "c"], [ 1, "c" ] ], +# # [ "+", [ 3, nil], [ 2, "d" ] ], +# # [ "=", [ 3, "e"], [ 3, "e" ] ], +# # [ "!", [ 4, "h"], [ 4, "f" ] ], +# # [ "=", [ 5, "j"], [ 5, "j" ] ], +# # [ "+", [ 6, nil], [ 6, "k" ] ], +# # [ "=", [ 6, "l"], [ 7, "l" ] ], +# # [ "=", [ 7, "m"], [ 8, "m" ] ], +# # [ "!", [ 8, "n"], [ 9, "r" ] ], +# # [ "!", [ 9, "p"], [ 10, "s" ] ], +# # [ "+", [ 10, nil], [ 11, "t" ] ] ] +# +# The result of this operation is similar to that of +# Diff::LCS::ContextDiffCallbacks. They may be compared as: +# +# s = Diff::LCS.sdiff(seq1, seq2).reject { |e| e.action == "=" } +# c = Diff::LCS.sdiff(seq1, seq2, Diff::LCS::ContextDiffCallbacks).flatten(1) +# +# s == c # -> true +# +# === Use +# +# This callback object must be initialised and is used by the Diff::LCS#sdiff +# method. +# +# cbo = Diff::LCS::SDiffCallbacks.new +# Diff::LCS.LCS(seq1, seq2, cbo) +# +# As with the other initialisable callback objects, +# Diff::LCS::SDiffCallbacks can be initialised with a block. As there is no +# "fininishing" to be done, this has no effect on the state of the object. +# +# cbo = Diff::LCS::SDiffCallbacks.new { |tcbo| Diff::LCS.LCS(seq1, seq2, tcbo) } +# +# === Simplified Array Format +# +# The simplified array format used in the example above can be obtained +# with: +# +# require 'pp' +# pp diffs.map { |e| e.to_a } +class Diff::LCS::SDiffCallbacks + # Returns the difference set collected during the diff process. + attr_reader :diffs + + def initialize # :yields: self + @diffs = [] + yield self if block_given? + end + + def match(event) + @diffs << Diff::LCS::ContextChange.simplify(event) + end + + def discard_a(event) + @diffs << Diff::LCS::ContextChange.simplify(event) + end + + def discard_b(event) + @diffs << Diff::LCS::ContextChange.simplify(event) + end + + def change(event) + @diffs << Diff::LCS::ContextChange.simplify(event) + end +end diff --git a/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/lib/diff/lcs/change.rb b/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/lib/diff/lcs/change.rb new file mode 100644 index 000000000..714d78c8a --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/lib/diff/lcs/change.rb @@ -0,0 +1,174 @@ +# frozen_string_literal: true + +# Represents a simplistic (non-contextual) change. Represents the removal or +# addition of an element from either the old or the new sequenced +# enumerable. +class Diff::LCS::Change + IntClass = 1.class # Fixnum is deprecated in Ruby 2.4 # standard:disable Naming/ConstantName + + # The only actions valid for changes are '+' (add), '-' (delete), '=' + # (no change), '!' (changed), '<' (tail changes from first sequence), or + # '>' (tail changes from second sequence). The last two ('<>') are only + # found with Diff::LCS::diff and Diff::LCS::sdiff. + VALID_ACTIONS = %w[+ - = ! > <].freeze + + def self.valid_action?(action) + VALID_ACTIONS.include? action + end + + # Returns the action this Change represents. + attr_reader :action + + # Returns the position of the Change. + attr_reader :position + # Returns the sequence element of the Change. 
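  # An illustrative sketch (not upstream documentation) of the array round
  # trip used throughout the callbacks and patchsets:
  #
  #   change = Diff::LCS::Change.from_a(["+", 2, "d"])
  #   change.action   # => "+"
  #   change.position # => 2
  #   change.element  # => "d"
  #   change.adding?  # => true
  #   change.to_a     # => ["+", 2, "d"]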
+ attr_reader :element + + def initialize(*args) + @action, @position, @element = *args + + fail "Invalid Change Action '#{@action}'" unless Diff::LCS::Change.valid_action?(@action) + fail "Invalid Position Type" unless @position.is_a? IntClass + end + + def inspect(*_args) + "#<#{self.class}: #{to_a.inspect}>" + end + + def to_a + [@action, @position, @element] + end + + alias_method :to_ary, :to_a + + def self.from_a(arr) + arr = arr.flatten(1) + case arr.size + when 5 + Diff::LCS::ContextChange.new(*arr[0...5]) + when 3 + Diff::LCS::Change.new(*arr[0...3]) + else + fail "Invalid change array format provided." + end + end + + include Comparable + + def ==(other) + (self.class == other.class) and + (action == other.action) and + (position == other.position) and + (element == other.element) + end + + def <=>(other) + r = action <=> other.action + r = position <=> other.position if r.zero? + r = element <=> other.element if r.zero? + r + end + + def adding? + @action == "+" + end + + def deleting? + @action == "-" + end + + def unchanged? + @action == "=" + end + + def changed? + @action == "!" + end + + def finished_a? + @action == ">" + end + + def finished_b? + @action == "<" + end +end + +# Represents a contextual change. Contains the position and values of the +# elements in the old and the new sequenced enumerables as well as the action +# taken. +class Diff::LCS::ContextChange < Diff::LCS::Change + # We don't need these two values. + undef :position + undef :element + + # Returns the old position being changed. + attr_reader :old_position + # Returns the new position being changed. + attr_reader :new_position + # Returns the old element being changed. + attr_reader :old_element + # Returns the new element being changed. + attr_reader :new_element + + def initialize(*args) + @action, @old_position, @old_element, @new_position, @new_element = *args + + fail "Invalid Change Action '#{@action}'" unless Diff::LCS::Change.valid_action?(@action) + fail "Invalid (Old) Position Type" unless @old_position.nil? || @old_position.is_a?(IntClass) + fail "Invalid (New) Position Type" unless @new_position.nil? || @new_position.is_a?(IntClass) + end + + def to_a + [ + @action, + [@old_position, @old_element], + [@new_position, @new_element] + ] + end + + alias_method :to_ary, :to_a + + def self.from_a(arr) + Diff::LCS::Change.from_a(arr) + end + + # Simplifies a context change for use in some diff callbacks. '<' actions + # are converted to '-' and '>' actions are converted to '+'. + def self.simplify(event) + ea = event.to_a + + case ea[0] + when "-" + ea[2][1] = nil + when "<" + ea[0] = "-" + ea[2][1] = nil + when "+" + ea[1][1] = nil + when ">" + ea[0] = "+" + ea[1][1] = nil + end + + Diff::LCS::ContextChange.from_a(ea) + end + + def ==(other) + (self.class == other.class) and + (@action == other.action) and + (@old_position == other.old_position) and + (@new_position == other.new_position) and + (@old_element == other.old_element) and + (@new_element == other.new_element) + end + + def <=>(other) + r = @action <=> other.action + r = @old_position <=> other.old_position if r.zero? + r = @new_position <=> other.new_position if r.zero? + r = @old_element <=> other.old_element if r.zero? + r = @new_element <=> other.new_element if r.zero? 
+ r + end +end diff --git a/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/lib/diff/lcs/htmldiff.rb b/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/lib/diff/lcs/htmldiff.rb new file mode 100644 index 000000000..90732438c --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/lib/diff/lcs/htmldiff.rb @@ -0,0 +1,160 @@ +# frozen_string_literal: true + +require "erb" + +# Produce a simple HTML diff view. +class Diff::LCS::HTMLDiff + class << self + # standard:disable ThreadSafety/ClassAndModuleAttributes + attr_accessor :can_expand_tabs # :nodoc: + # standard:enable ThreadSafety/ClassAndModuleAttributes + end + self.can_expand_tabs = true + + class Callbacks # :nodoc: + attr_accessor :output + attr_accessor :match_class + attr_accessor :only_a_class + attr_accessor :only_b_class + + def initialize(output, options = {}) + @output = output + options ||= {} + + @match_class = options[:match_class] || "match" + @only_a_class = options[:only_a_class] || "only_a" + @only_b_class = options[:only_b_class] || "only_b" + end + + def htmlize(element, css_class) + element = " " if element.empty? + %(

<pre class="#{__send__(css_class)}">#{element}</pre>
\n) + end + private :htmlize + + # This will be called with both lines are the same + def match(event) + @output << htmlize(event.old_element, :match_class) + end + + # This will be called when there is a line in A that isn't in B + def discard_a(event) + @output << htmlize(event.old_element, :only_a_class) + end + + # This will be called when there is a line in B that isn't in A + def discard_b(event) + @output << htmlize(event.new_element, :only_b_class) + end + end + + # standard:disable Style/HashSyntax + DEFAULT_OPTIONS = { + :expand_tabs => nil, + :output => nil, + :css => nil, + :title => nil + }.freeze + # standard:enable Style/HashSyntax + + # standard:disable Layout/HeredocIndentation + DEFAULT_CSS = <<-CSS +body { margin: 0; } +.diff +{ + border: 1px solid black; + margin: 1em 2em; +} +p +{ + margin-left: 2em; +} +pre +{ + padding-left: 1em; + margin: 0; + font-family: Inconsolata, Consolas, Lucida, Courier, monospaced; + white-space: pre; +} +.match { } +.only_a +{ + background-color: #fdd; + color: red; + text-decoration: line-through; +} +.only_b +{ + background-color: #ddf; + color: blue; + border-left: 3px solid blue +} +h1 { margin-left: 2em; } + CSS + # standard:enable Layout/HeredocIndentation + + def initialize(left, right, options = nil) + @left = left + @right = right + @options = options + + @options = DEFAULT_OPTIONS.dup if @options.nil? + end + + def verify_options + @options[:expand_tabs] ||= 4 + @options[:expand_tabs] = 4 if @options[:expand_tabs].negative? + + @options[:output] ||= $stdout + + @options[:css] ||= DEFAULT_CSS.dup + + @options[:title] ||= "diff" + end + private :verify_options + + attr_reader :options + + def run + verify_options + + if @options[:expand_tabs].positive? && self.class.can_expand_tabs + formatter = Text::Format.new + formatter.tabstop = @options[:expand_tabs] + + @left.map! { |line| formatter.expand(line.chomp) } + @right.map! { |line| formatter.expand(line.chomp) } + end + + @left.map! { |line| ERB::Util.html_escape(line.chomp) } + @right.map! { |line| ERB::Util.html_escape(line.chomp) } + + # standard:disable Layout/HeredocIndentation + @options[:output] << <<-OUTPUT + + + #{@options[:title]} + + + +

<h1>#{@options[:title]}</h1>

+

<p>Legend: <span class="only_a">Only in Old</span>&nbsp; + <span class="only_b">Only in New</span></p>

+
+ OUTPUT + # standard:enable Layout/HeredocIndentation + + callbacks = Callbacks.new(@options[:output]) + Diff::LCS.traverse_sequences(@left, @right, callbacks) + + # standard:disable Layout/HeredocIndentation + @options[:output] << <<-OUTPUT +
+ + + OUTPUT + # standard:enable Layout/HeredocIndentation + end +end diff --git a/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/lib/diff/lcs/hunk.rb b/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/lib/diff/lcs/hunk.rb new file mode 100644 index 000000000..24b33bca3 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/lib/diff/lcs/hunk.rb @@ -0,0 +1,379 @@ +# frozen_string_literal: true + +require "diff/lcs/block" + +# A Hunk is a group of Blocks which overlap because of the context surrounding +# each block. (So if we're not using context, every hunk will contain one +# block.) Used in the diff program (bin/ldiff). +class Diff::LCS::Hunk + OLD_DIFF_OP_ACTION = {"+" => "a", "-" => "d", "!" => "c"}.freeze # :nodoc: + ED_DIFF_OP_ACTION = {"+" => "a", "-" => "d", "!" => "c"}.freeze # :nodoc: + + private_constant :OLD_DIFF_OP_ACTION, :ED_DIFF_OP_ACTION if respond_to?(:private_constant) + + # Create a hunk using references to both the old and new data, as well as the + # piece of data. + def initialize(data_old, data_new, piece, flag_context, file_length_difference) + # At first, a hunk will have just one Block in it + @blocks = [Diff::LCS::Block.new(piece)] + + if @blocks[0].remove.empty? && @blocks[0].insert.empty? + fail "Cannot build a hunk from #{piece.inspect}; has no add or remove actions" + end + + if String.method_defined?(:encoding) + @preferred_data_encoding = data_old.fetch(0) { data_new.fetch(0) { "" } }.encoding + end + + @data_old = data_old + @data_new = data_new + @old_empty = data_old.empty? || (data_old.size == 1 && data_old[0].empty?) + @new_empty = data_new.empty? || (data_new.size == 1 && data_new[0].empty?) + + before = after = file_length_difference + after += @blocks[0].diff_size + @file_length_difference = after # The caller must get this manually + @max_diff_size = @blocks.map { |e| e.diff_size.abs }.max + + # Save the start & end of each array. If the array doesn't exist (e.g., + # we're only adding items in this block), then figure out the line number + # based on the line number of the other file and the current difference in + # file lengths. + if @blocks[0].remove.empty? + a1 = a2 = nil + else + a1 = @blocks[0].remove[0].position + a2 = @blocks[0].remove[-1].position + end + + if @blocks[0].insert.empty? + b1 = b2 = nil + else + b1 = @blocks[0].insert[0].position + b2 = @blocks[0].insert[-1].position + end + + @start_old = a1 || (b1 - before) + @start_new = b1 || (a1 + before) + @end_old = a2 || (b2 - after) + @end_new = b2 || (a2 + after) + + self.flag_context = flag_context + end + + attr_reader :blocks + attr_reader :start_old, :start_new + attr_reader :end_old, :end_new + attr_reader :file_length_difference + + # Change the "start" and "end" fields to note that context should be added + # to this hunk. + attr_accessor :flag_context + undef :flag_context= + def flag_context=(context) # :nodoc: # standard:disable Lint/DuplicateMethods + return if context.nil? || context.zero? + + add_start = (context > @start_old) ? @start_old : context + + @start_old -= add_start + @start_new -= add_start + + old_size = @data_old.size + + add_end = + if (@end_old + context) >= old_size + old_size - @end_old - 1 + else + context + end + + @end_old += add_end + @end_new += add_end + end + + # Merges this hunk and the provided hunk together if they overlap. Returns + # a truthy value so that if there is no overlap, you can know the merge + # was skipped. 
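  # An illustrative sketch (not upstream documentation) of how bin/ldiff uses
  # this class: one Hunk per change block, rendered in the requested format.
  # Overlapping hunks would additionally be combined via #merge below; the
  # file names here are hypothetical.
  #
  #   old_lines = File.readlines("old.txt")
  #   new_lines = File.readlines("new.txt")
  #   length_difference = 0
  #
  #   Diff::LCS.diff(old_lines, new_lines).each do |piece|
  #     hunk = Diff::LCS::Hunk.new(old_lines, new_lines, piece, 3, length_difference)
  #     length_difference = hunk.file_length_difference
  #     puts hunk.diff(:unified)
  #   end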
+ def merge(hunk) + return unless overlaps?(hunk) + + @start_old = hunk.start_old + @start_new = hunk.start_new + blocks.unshift(*hunk.blocks) + end + alias_method :unshift, :merge + + # Determines whether there is an overlap between this hunk and the + # provided hunk. This will be true if the difference between the two hunks + # start or end positions is within one position of each other. + def overlaps?(hunk) + hunk and (((@start_old - hunk.end_old) <= 1) or + ((@start_new - hunk.end_new) <= 1)) + end + + # Returns a diff string based on a format. + def diff(format, last = false) + case format + when :old + old_diff(last) + when :unified + unified_diff(last) + when :context + context_diff(last) + when :ed + self + when :reverse_ed, :ed_finish + ed_diff(format, last) + else + fail "Unknown diff format #{format}." + end + end + + # Note that an old diff can't have any context. Therefore, we know that + # there's only one block in the hunk. + def old_diff(last = false) + warn "Expecting only one block in an old diff hunk!" if @blocks.size > 1 + + block = @blocks[0] + + if last + old_missing_newline = !@old_empty && missing_last_newline?(@data_old) + new_missing_newline = !@new_empty && missing_last_newline?(@data_new) + end + + # Calculate item number range. Old diff range is just like a context + # diff range, except the ranges are on one line with the action between + # them. + s = encode("#{context_range(:old, ",")}#{OLD_DIFF_OP_ACTION[block.op]}#{context_range(:new, ",")}\n") + # If removing anything, just print out all the remove lines in the hunk + # which is just all the remove lines in the block. + unless block.remove.empty? + @data_old[@start_old..@end_old].each { |e| s << encode("< ") + e.chomp + encode("\n") } + end + + s << encode("\\ No newline at end of file\n") if old_missing_newline && !new_missing_newline + s << encode("---\n") if block.op == "!" + + unless block.insert.empty? + @data_new[@start_new..@end_new].each { |e| s << encode("> ") + e.chomp + encode("\n") } + end + + s << encode("\\ No newline at end of file\n") if new_missing_newline && !old_missing_newline + + s + end + private :old_diff + + def unified_diff(last = false) + # Calculate item number range. + s = encode("@@ -#{unified_range(:old)} +#{unified_range(:new)} @@\n") + + # Outlist starts containing the hunk of the old file. Removing an item + # just means putting a '-' in front of it. Inserting an item requires + # getting it from the new file and splicing it in. We splice in + # +num_added+ items. Remove blocks use +num_added+ because splicing + # changed the length of outlist. + # + # We remove +num_removed+ items. Insert blocks use +num_removed+ + # because their item numbers -- corresponding to positions in the NEW + # file -- don't take removed items into account. 
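  # A worked illustration (not from the upstream comments): with three lines
  # of context, old = %w(a b c) and new = %w(a x c) give an initial outlist of
  # [" a", " b", " c"]. Removing "b" rewrites index 1 in place to "-b" and
  # bumps num_removed to 1; inserting "x" at new position 1 then splices "+x"
  # in at offset 1 + num_removed = 2, yielding [" a", "-b", "+x", " c"].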
+ lo, hi, num_added, num_removed = @start_old, @end_old, 0, 0 + + # standard:disable Performance/UnfreezeString + outlist = @data_old[lo..hi].map { |e| String.new("#{encode(" ")}#{e.chomp}") } + # standard:enable Performance/UnfreezeString + + last_block = blocks[-1] + + if last + old_missing_newline = !@old_empty && missing_last_newline?(@data_old) + new_missing_newline = !@new_empty && missing_last_newline?(@data_new) + end + + @blocks.each do |block| + block.remove.each do |item| + op = item.action.to_s # - + offset = item.position - lo + num_added + outlist[offset][0, 1] = encode(op) + num_removed += 1 + end + + if last && block == last_block && old_missing_newline && !new_missing_newline + outlist << encode('\\ No newline at end of file') + num_removed += 1 + end + + block.insert.each do |item| + op = item.action.to_s # + + offset = item.position - @start_new + num_removed + outlist[offset, 0] = encode(op) + @data_new[item.position].chomp + num_added += 1 + end + end + + outlist << encode('\\ No newline at end of file') if last && new_missing_newline + + s << outlist.join(encode("\n")) + + s + end + private :unified_diff + + def context_diff(last = false) + s = encode("***************\n") + s << encode("*** #{context_range(:old, ",")} ****\n") + r = context_range(:new, ",") + + if last + old_missing_newline = missing_last_newline?(@data_old) + new_missing_newline = missing_last_newline?(@data_new) + end + + # Print out file 1 part for each block in context diff format if there + # are any blocks that remove items + lo, hi = @start_old, @end_old + removes = @blocks.reject { |e| e.remove.empty? } + + unless removes.empty? + # standard:disable Performance/UnfreezeString + outlist = @data_old[lo..hi].map { |e| String.new("#{encode(" ")}#{e.chomp}") } + # standard:enable Performance/UnfreezeString + + last_block = removes[-1] + + removes.each do |block| + block.remove.each do |item| + outlist[item.position - lo][0, 1] = encode(block.op) # - or ! + end + + if last && block == last_block && old_missing_newline + outlist << encode('\\ No newline at end of file') + end + end + + s << outlist.join(encode("\n")) << encode("\n") + end + + s << encode("--- #{r} ----\n") + lo, hi = @start_new, @end_new + inserts = @blocks.reject { |e| e.insert.empty? } + + unless inserts.empty? + # standard:disable Performance/UnfreezeString + outlist = @data_new[lo..hi].map { |e| String.new("#{encode(" ")}#{e.chomp}") } + # standard:enable Performance/UnfreezeString + + last_block = inserts[-1] + + inserts.each do |block| + block.insert.each do |item| + outlist[item.position - lo][0, 1] = encode(block.op) # + or ! + end + + if last && block == last_block && new_missing_newline + outlist << encode('\\ No newline at end of file') + end + end + s << outlist.join(encode("\n")) + end + + s + end + private :context_diff + + def ed_diff(format, last) + warn "Expecting only one block in an old diff hunk!" if @blocks.size > 1 + if last + # ed script doesn't support well incomplete lines + warn ": No newline at end of file\n" if !@old_empty && missing_last_newline?(@data_old) + warn ": No newline at end of file\n" if !@new_empty && missing_last_newline?(@data_new) + + if @blocks[0].op == "!" 
+ return +"" if @blocks[0].changes[0].element == @blocks[0].changes[1].element + "\n" + return +"" if @blocks[0].changes[0].element + "\n" == @blocks[0].changes[1].element + end + end + + s = + if format == :reverse_ed + encode("#{ED_DIFF_OP_ACTION[@blocks[0].op]}#{context_range(:old, " ")}\n") + else + encode("#{context_range(:old, ",")}#{ED_DIFF_OP_ACTION[@blocks[0].op]}\n") + end + + unless @blocks[0].insert.empty? + @data_new[@start_new..@end_new].each do |e| + s << e.chomp + encode("\n") + end + s << encode(".\n") + end + s + end + private :ed_diff + + # Generate a range of item numbers to print. Only print 1 number if the + # range has only one item in it. Otherwise, it's 'start,end' + def context_range(mode, op) + case mode + when :old + s, e = (@start_old + 1), (@end_old + 1) + when :new + s, e = (@start_new + 1), (@end_new + 1) + end + + (s < e) ? "#{s}#{op}#{e}" : e.to_s + end + private :context_range + + # Generate a range of item numbers to print for unified diff. Print number + # where block starts, followed by number of lines in the block + # (don't print number of lines if it's 1) + def unified_range(mode) + case mode + when :old + return "0,0" if @old_empty + s, e = (@start_old + 1), (@end_old + 1) + when :new + return "0,0" if @new_empty + s, e = (@start_new + 1), (@end_new + 1) + end + + length = e - s + 1 + + (length <= 1) ? e.to_s : "#{s},#{length}" + end + private :unified_range + + def missing_last_newline?(data) + newline = encode("\n") + + if data[-2] + data[-2].end_with?(newline) && !data[-1].end_with?(newline) + elsif data[-1] + !data[-1].end_with?(newline) + else + true + end + end + + if String.method_defined?(:encoding) + def encode(literal, target_encoding = @preferred_data_encoding) + literal.encode target_encoding + end + + def encode_as(string, *args) + args.map { |arg| arg.encode(string.encoding) } + end + else + def encode(literal, _target_encoding = nil) + literal + end + + def encode_as(_string, *args) + args + end + end + + private :encode + private :encode_as +end diff --git a/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/lib/diff/lcs/internals.rb b/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/lib/diff/lcs/internals.rb new file mode 100644 index 000000000..8a9160a6a --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/lib/diff/lcs/internals.rb @@ -0,0 +1,308 @@ +# frozen_string_literal: true + +class << Diff::LCS + def diff_traversal(method, seq1, seq2, callbacks, &block) + callbacks = callbacks_for(callbacks) + case method + when :diff + traverse_sequences(seq1, seq2, callbacks) + when :sdiff + traverse_balanced(seq1, seq2, callbacks) + end + callbacks.finish if callbacks.respond_to? :finish + + if block + callbacks.diffs.map do |hunk| + if hunk.is_a? Array + hunk.map { |hunk_block| block[hunk_block] } + else + block[hunk] + end + end + else + callbacks.diffs + end + end + private :diff_traversal +end + +module Diff::LCS::Internals # :nodoc: +end + +class << Diff::LCS::Internals + # Compute the longest common subsequence between the sequenced + # Enumerables +a+ and +b+. The result is an array whose contents is such + # that + # + # result = Diff::LCS::Internals.lcs(a, b) + # result.each_with_index do |e, i| + # assert_equal(a[i], b[e]) unless e.nil? + # end + def lcs(a, b) + a_start = b_start = 0 + a_finish = a.size - 1 + b_finish = b.size - 1 + vector = [] + + # Collect any common elements at the beginning... 
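      # (A small worked example, not from the upstream comments: for
      # a = %w(a b c d) and b = %w(a c d), the vector returned below is
      # [0, nil, 1, 2]. The prefix scan here settles vector[0], the suffix
      # scan settles vector[2] and vector[3], and the main loop then has
      # nothing left to match.)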
+ while (a_start <= a_finish) && (b_start <= b_finish) && (a[a_start] == b[b_start]) + vector[a_start] = b_start + a_start += 1 + b_start += 1 + end + + # Now the end... + while (a_start <= a_finish) && (b_start <= b_finish) && (a[a_finish] == b[b_finish]) + vector[a_finish] = b_finish + a_finish -= 1 + b_finish -= 1 + end + + # Now, compute the equivalence classes of positions of elements. + # An explanation for how this works: https://codeforces.com/topic/92191 + b_matches = position_hash(b, b_start..b_finish) + + thresh = [] + links = [] + string = a.is_a?(String) + + (a_start..a_finish).each do |i| + ai = string ? a[i, 1] : a[i] + bm = b_matches[ai] + k = nil + bm.reverse_each do |j| + # Although the threshold check is not mandatory for this to work, + # it may have an optimization purpose + # An attempt to remove it: https://github.com/halostatue/diff-lcs/pull/72 + # Why it is reintroduced: https://github.com/halostatue/diff-lcs/issues/78 + if k && (thresh[k] > j) && (thresh[k - 1] < j) + thresh[k] = j + else + k = replace_next_larger(thresh, j, k) + end + links[k] = [k.positive? ? links[k - 1] : nil, i, j] unless k.nil? + end + end + + unless thresh.empty? + link = links[thresh.size - 1] + until link.nil? + vector[link[1]] = link[2] + link = link[0] + end + end + + vector + end + + # This method will analyze the provided patchset to provide a single-pass + # normalization (conversion of the array form of Diff::LCS::Change objects to + # the object form of same) and detection of whether the patchset represents + # changes to be made. + def analyze_patchset(patchset, depth = 0) + fail "Patchset too complex" if depth > 1 + + has_changes = false + new_patchset = [] + + # Format: + # [ # patchset + # # hunk (change) + # [ # hunk + # # change + # ] + # ] + + patchset.each do |hunk| + case hunk + when Diff::LCS::Change + has_changes ||= !hunk.unchanged? + new_patchset << hunk + when Array + # Detect if the 'hunk' is actually an array-format change object. + if Diff::LCS::Change.valid_action? hunk[0] + hunk = Diff::LCS::Change.from_a(hunk) + has_changes ||= !hunk.unchanged? + new_patchset << hunk + else + with_changes, hunk = analyze_patchset(hunk, depth + 1) + has_changes ||= with_changes + new_patchset.concat(hunk) + end + else + fail ArgumentError, "Cannot normalise a hunk of class #{hunk.class}." + end + end + + [has_changes, new_patchset] + end + + # Examine the patchset and the source to see in which direction the + # patch should be applied. + # + # WARNING: By default, this examines the whole patch, so this could take + # some time. This also works better with Diff::LCS::ContextChange or + # Diff::LCS::Change as its source, as an array will cause the creation + # of one of the above. + def intuit_diff_direction(src, patchset, limit = nil) + string = src.is_a?(String) + count = left_match = left_miss = right_match = right_miss = 0 + + patchset.each do |change| + count += 1 + + case change + when Diff::LCS::ContextChange + le = string ? src[change.old_position, 1] : src[change.old_position] + re = string ? src[change.new_position, 1] : src[change.new_position] + + case change.action + when "-" # Remove details from the old string + if le == change.old_element + left_match += 1 + else + left_miss += 1 + end + when "+" + if re == change.new_element + right_match += 1 + else + right_miss += 1 + end + when "=" + left_miss += 1 if le != change.old_element + right_miss += 1 if re != change.new_element + when "!" 
+ if le == change.old_element + left_match += 1 + elsif re == change.new_element + right_match += 1 + else + left_miss += 1 + right_miss += 1 + end + end + when Diff::LCS::Change + # With a simplistic change, we can't tell the difference between + # the left and right on '!' actions, so we ignore those. On '=' + # actions, if there's a miss, we miss both left and right. + element = string ? src[change.position, 1] : src[change.position] + + case change.action + when "-" + if element == change.element + left_match += 1 + else + left_miss += 1 + end + when "+" + if element == change.element + right_match += 1 + else + right_miss += 1 + end + when "=" + if element != change.element + left_miss += 1 + right_miss += 1 + end + end + end + + break if !limit.nil? && (count > limit) + end + + no_left = left_match.zero? && left_miss.positive? + no_right = right_match.zero? && right_miss.positive? + + case [no_left, no_right] + when [false, true] + :patch + when [true, false] + :unpatch + else + case left_match <=> right_match + when 1 + if left_miss.zero? + :patch + else + :unpatch + end + when -1 + if right_miss.zero? + :unpatch + else + :patch + end + else + fail "The provided patchset does not appear to apply to the provided \ +enumerable as either source or destination value." + end + end + end + + # Find the place at which +value+ would normally be inserted into the + # Enumerable. If that place is already occupied by +value+, do nothing + # and return +nil+. If the place does not exist (i.e., it is off the end + # of the Enumerable), add it to the end. Otherwise, replace the element + # at that point with +value+. It is assumed that the Enumerable's values + # are numeric. + # + # This operation preserves the sort order. + def replace_next_larger(enum, value, last_index = nil) + # Off the end? + if enum.empty? || (value > enum[-1]) + enum << value + return enum.size - 1 + end + + # Binary search for the insertion point + last_index ||= enum.size - 1 + first_index = 0 + while first_index <= last_index + i = (first_index + last_index) >> 1 + + found = enum[i] + + return nil if value == found + + if value > found + first_index = i + 1 + else + last_index = i - 1 + end + end + + # The insertion point is in first_index; overwrite the next larger + # value. + enum[first_index] = value + first_index + end + private :replace_next_larger + + # If +vector+ maps the matching elements of another collection onto this + # Enumerable, compute the inverse of +vector+ that maps this Enumerable + # onto the collection. (Currently unused.) + def inverse_vector(a, vector) + inverse = a.dup + (0...vector.size).each do |i| + inverse[vector[i]] = i unless vector[i].nil? + end + inverse + end + private :inverse_vector + + # Returns a hash mapping each element of an Enumerable to the set of + # positions it occupies in the Enumerable, optionally restricted to the + # elements specified in the range of indexes specified by +interval+. + def position_hash(enum, interval) + string = enum.is_a?(String) + hash = Hash.new { |h, k| h[k] = [] } + interval.each do |i| + k = string ? 
enum[i, 1] : enum[i] + hash[k] << i + end + hash + end + private :position_hash +end diff --git a/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/lib/diff/lcs/ldiff.rb b/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/lib/diff/lcs/ldiff.rb new file mode 100644 index 000000000..6442c9bf8 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/lib/diff/lcs/ldiff.rb @@ -0,0 +1,189 @@ +# frozen_string_literal: true + +require "optparse" +require "diff/lcs/hunk" + +class Diff::LCS::Ldiff # :nodoc: + # standard:disable Layout/HeredocIndentation + BANNER = <<-COPYRIGHT +ldiff #{Diff::LCS::VERSION} + Copyright 2004-2025 Austin Ziegler + + Part of Diff::LCS. + https://github.com/halostatue/diff-lcs + + This program is free software. It may be redistributed and/or modified under + the terms of the GPL version 2 (or later), the Perl Artistic licence, or the + MIT licence. + COPYRIGHT + # standard:enable Layout/HeredocIndentation + + InputInfo = Struct.new(:filename, :data, :stat) do + def initialize(filename) + super(filename, ::File.read(filename), ::File.stat(filename)) + end + end + + attr_reader :format, :lines # :nodoc: + attr_reader :file_old, :file_new # :nodoc: + attr_reader :data_old, :data_new # :nodoc: + + def self.run(args, input = $stdin, output = $stdout, error = $stderr) # :nodoc: + new.run(args, input, output, error) + end + + def initialize + @binary = nil + @format = :old + @lines = 0 + end + + def run(args, _input = $stdin, output = $stdout, error = $stderr) # :nodoc: + args.options do |o| + o.banner = "Usage: #{File.basename($0)} [options] oldfile newfile" + o.separator "" + o.on( + "-c", "-C", "--context [LINES]", Integer, + "Displays a context diff with LINES lines", "of context. Default 3 lines." + ) do |ctx| + @format = :context + @lines = ctx || 3 + end + o.on( + "-u", "-U", "--unified [LINES]", Integer, + "Displays a unified diff with LINES lines", "of context. Default 3 lines." + ) do |ctx| + @format = :unified + @lines = ctx || 3 + end + o.on("-e", "Creates an 'ed' script to change", "oldfile to newfile.") do |_ctx| + @format = :ed + end + o.on("-f", "Creates an 'ed' script to change", "oldfile to newfile in reverse order.") do |_ctx| + @format = :reverse_ed + end + o.on( + "-a", "--text", + "Treat the files as text and compare them", "line-by-line, even if they do not seem", "to be text." + ) do |_txt| + @binary = false + end + o.on("--binary", "Treats the files as binary.") do |_bin| + @binary = true + end + o.on("-q", "--brief", "Report only whether or not the files", "differ, not the details.") do |_ctx| + @format = :report + end + o.on_tail("--help", "Shows this text.") do + error << o + return 0 + end + o.on_tail("--version", "Shows the version of Diff::LCS.") do + error << Diff::LCS::Ldiff::BANNER + return 0 + end + o.on_tail "" + o.on_tail 'By default, runs produces an "old-style" diff, with output like UNIX diff.' + o.parse! + end + + unless args.size == 2 + error << args.options + return 127 + end + + # Defaults are for old-style diff + @format ||= :old + @lines ||= 0 + + file_old, file_new = *ARGV + diff?( + InputInfo.new(file_old), + InputInfo.new(file_new), + @format, + output, + binary: @binary, + lines: @lines + ) ? 
1 : 0 + end + + def diff?(info_old, info_new, format, output, binary: nil, lines: 0) + case format + when :context + char_old = "*" * 3 + char_new = "-" * 3 + when :unified + char_old = "-" * 3 + char_new = "+" * 3 + end + + # After we've read up to a certain point in each file, the number of + # items we've read from each file will differ by FLD (could be 0). + file_length_difference = 0 + + # Test binary status + if binary.nil? + old_bin = info_old.data[0, 4096].include?("\0") + new_bin = info_new.data[0, 4096].include?("\0") + binary = old_bin || new_bin + end + + # diff yields lots of pieces, each of which is basically a Block object + if binary + has_diffs = (info_old.data != info_new.data) + if format != :report + if has_diffs + output << "Binary files #{info_old.filename} and #{info_new.filename} differ\n" + return true + end + return false + end + else + data_old = info_old.data.lines.to_a + data_new = info_new.data.lines.to_a + diffs = Diff::LCS.diff(data_old, data_new) + return false if diffs.empty? + end + + case format + when :report + output << "Files #{info_old.filename} and #{info_new.filename} differ\n" + return true + when :unified, :context + ft = info_old.stat.mtime.localtime.strftime("%Y-%m-%d %H:%M:%S.000000000 %z") + output << "#{char_old} #{info_old.filename}\t#{ft}\n" + ft = info_new.stat.mtime.localtime.strftime("%Y-%m-%d %H:%M:%S.000000000 %z") + output << "#{char_new} #{info_new.filename}\t#{ft}\n" + when :ed + real_output = output + output = [] + end + + # Loop over hunks. If a hunk overlaps with the last hunk, join them. + # Otherwise, print out the old one. + oldhunk = hunk = nil + diffs.each do |piece| + begin + hunk = Diff::LCS::Hunk.new(data_old, data_new, piece, lines, file_length_difference) + file_length_difference = hunk.file_length_difference + + next unless oldhunk + next if lines.positive? && hunk.merge(oldhunk) + + output << oldhunk.diff(format) + output << "\n" if format == :unified + ensure + oldhunk = hunk + end + end + + last = oldhunk.diff(format, true) + last << "\n" unless last.is_a?(Diff::LCS::Hunk) || last.empty? 
|| last.end_with?("\n") + + output << last + + output.reverse_each { |e| real_output << e.diff(:ed_finish, e == output[0]) } if format == :ed + + true + end +end diff --git a/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/lib/diff/lcs/string.rb b/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/lib/diff/lcs/string.rb new file mode 100644 index 000000000..9ab32e92b --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/lib/diff/lcs/string.rb @@ -0,0 +1,5 @@ +# frozen_string_literal: true + +class String + include Diff::LCS +end diff --git a/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/lib/diff/lcs/version.rb b/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/lib/diff/lcs/version.rb new file mode 100644 index 000000000..82830e3c4 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/lib/diff/lcs/version.rb @@ -0,0 +1,7 @@ +# frozen_string_literal: true + +module Diff + module LCS + VERSION = "1.6.2" + end +end diff --git a/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/mise.toml b/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/mise.toml new file mode 100644 index 000000000..22418cf13 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/mise.toml @@ -0,0 +1,5 @@ +[tools] +ruby = "3.4" + +[env] +MAINTENANCE = "true" diff --git a/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/change_spec.rb b/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/change_spec.rb new file mode 100644 index 000000000..42533ae03 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/change_spec.rb @@ -0,0 +1,89 @@ +# frozen_string_literal: true + +require "spec_helper" + +describe Diff::LCS::Change do + describe "an add" do + subject { described_class.new("+", 0, "element") } + it { should_not be_deleting } + it { should be_adding } + it { should_not be_unchanged } + it { should_not be_changed } + it { should_not be_finished_a } + it { should_not be_finished_b } + end + + describe "a delete" do + subject { described_class.new("-", 0, "element") } + it { should be_deleting } + it { should_not be_adding } + it { should_not be_unchanged } + it { should_not be_changed } + it { should_not be_finished_a } + it { should_not be_finished_b } + end + + describe "an unchanged" do + subject { described_class.new("=", 0, "element") } + it { should_not be_deleting } + it { should_not be_adding } + it { should be_unchanged } + it { should_not be_changed } + it { should_not be_finished_a } + it { should_not be_finished_b } + end + + describe "a changed" do + subject { described_class.new("!", 0, "element") } + it { should_not be_deleting } + it { should_not be_adding } + it { should_not be_unchanged } + it { should be_changed } + it { should_not be_finished_a } + it { should_not be_finished_b } + end + + describe "a finished_a" do + subject { described_class.new(">", 0, "element") } + it { should_not be_deleting } + it { should_not be_adding } + it { should_not be_unchanged } + it { should_not be_changed } + it { should be_finished_a } + it { should_not be_finished_b } + end + + describe "a finished_b" do + subject { described_class.new("<", 0, "element") } + it { should_not be_deleting } + it { should_not be_adding } + it { should_not be_unchanged } + it { should_not be_changed } + it { should_not be_finished_a } + it { should be_finished_b } + end + + describe "as array" do + it "should be converted" do + action, position, element = described_class.new("!", 0, "element") + expect(action).to eq "!" 
+ expect(position).to eq 0 + expect(element).to eq "element" + end + end +end + +describe Diff::LCS::ContextChange do + describe "as array" do + it "should be converted" do + action, (old_position, old_element), (new_position, new_element) = + described_class.new("!", 1, "old_element", 2, "new_element") + + expect(action).to eq "!" + expect(old_position).to eq 1 + expect(old_element).to eq "old_element" + expect(new_position).to eq 2 + expect(new_element).to eq "new_element" + end + end +end diff --git a/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/diff_spec.rb b/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/diff_spec.rb new file mode 100644 index 000000000..869f09861 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/diff_spec.rb @@ -0,0 +1,51 @@ +# frozen_string_literal: true + +require "spec_helper" + +describe Diff::LCS, ".diff" do + include Diff::LCS::SpecHelper::Matchers + + it "correctly diffs seq1 to seq2" do + diff_s1_s2 = Diff::LCS.diff(seq1, seq2) + expect(change_diff(correct_forward_diff)).to eq(diff_s1_s2) + end + + it "correctly diffs seq2 to seq1" do + diff_s2_s1 = Diff::LCS.diff(seq2, seq1) + expect(change_diff(correct_backward_diff)).to eq(diff_s2_s1) + end + + it "correctly diffs against an empty sequence" do + diff = Diff::LCS.diff(word_sequence, []) + correct_diff = [ + [ + ["-", 0, "abcd"], + ["-", 1, "efgh"], + ["-", 2, "ijkl"], + ["-", 3, "mnopqrstuvwxyz"] + ] + ] + + expect(change_diff(correct_diff)).to eq(diff) + + diff = Diff::LCS.diff([], word_sequence) + correct_diff.each do |hunk| + hunk.each { |change| change[0] = "+" } + end + expect(change_diff(correct_diff)).to eq(diff) + end + + it "correctly diffs 'xx' and 'xaxb'" do + left = "xx" + right = "xaxb" + expect(Diff::LCS.patch(left, Diff::LCS.diff(left, right))).to eq(right) + end + + it "returns an empty diff with (hello, hello)" do + expect(Diff::LCS.diff(hello, hello)).to be_empty + end + + it "returns an empty diff with (hello_ary, hello_ary)" do + expect(Diff::LCS.diff(hello_ary, hello_ary)).to be_empty + end +end diff --git a/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/123_x b/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/123_x new file mode 100644 index 000000000..cd34c2355 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/123_x @@ -0,0 +1,2 @@ +123 +x diff --git a/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/456_x b/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/456_x new file mode 100644 index 000000000..9a823ac31 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/456_x @@ -0,0 +1,2 @@ +456 +x diff --git a/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/aX b/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/aX new file mode 100644 index 000000000..5765d6a73 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/aX @@ -0,0 +1 @@ +aX diff --git a/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/bXaX b/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/bXaX new file mode 100644 index 000000000..a1c813dbd --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/bXaX @@ -0,0 +1 @@ +bXaX diff --git a/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ds1.csv b/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ds1.csv new file mode 100644 index 000000000..9ac84281d --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ds1.csv @@ -0,0 +1,50 @@ +1,3 
+2,7 +3,13 +4,21 +5,31 +6,43 +7,57 +8,73 +9,91 +10,111 +11,133 +12,157 +13,183 +14,211 +15,241 +16,273 +17,307 +18,343 +19,381 +20,421 +21,463 +22,507 +23,553 +24,601 +25,651 +26,703 +27,757 +28,813 +29,871 +30,931 +31,993 +32,1057 +33,1123 +34,1191 +35,1261 +36,1333 +37,1407 +38,1483 +39,1561 +40,1641 +41,1723 +42,1807 +43,1893 +44,1981 +45,2071 +46,2163 +47,2257 +48,2353 +49,2451 +50,2500 \ No newline at end of file diff --git a/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ds2.csv b/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ds2.csv new file mode 100644 index 000000000..797de7619 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ds2.csv @@ -0,0 +1,51 @@ + 1,3 +2,7 +3,13 +4,21 +5,31 +6,42 +7,57 +8,73 +9,91 +10,111 +11,133 +12,157 +13,183 +14,211 +15,241 +16,273 +17,307 +18,343 +19,200 +20,421 +21,463 +22,507 +23,553 +24,601 +25,651 +26,703 +27,757 +28,813 +29,871 +30,931 +31,123 +32,1057 +33,1123 +34,1000 +35,1261 +36,1333 +37,1407 +38,1483 +39,1561 +40,1641 +41,1723 +42,1807 +43,1893 +44,1981 +45,2071 +46,2163 +47,1524 +48,2353 +49,2451 +50,2500 +51,2520 diff --git a/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/empty b/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/empty new file mode 100644 index 000000000..e69de29bb diff --git a/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/file1.bin b/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/file1.bin new file mode 100644 index 0000000000000000000000000000000000000000..f76dd238ade08917e6712764a16a22005a50573d GIT binary patch literal 1 IcmZPo000310RR91 literal 0 HcmV?d00001 diff --git a/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/file2.bin b/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/file2.bin new file mode 100644 index 0000000000000000000000000000000000000000..ba18e3dcc474e720bdd955f81c8848324c862840 GIT binary patch literal 6 Ncmc~u&B@7U000O<0u=xN literal 0 HcmV?d00001 diff --git a/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/four_lines b/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/four_lines new file mode 100644 index 000000000..f384549cb --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/four_lines @@ -0,0 +1,4 @@ +one +two +three +four diff --git a/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/four_lines_with_missing_new_line b/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/four_lines_with_missing_new_line new file mode 100644 index 000000000..c40a3bdda --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/four_lines_with_missing_new_line @@ -0,0 +1,4 @@ +one +two +three +four \ No newline at end of file diff --git a/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ldiff/diff.missing_new_line1-e b/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ldiff/diff.missing_new_line1-e new file mode 100644 index 000000000..1e8a89cda --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ldiff/diff.missing_new_line1-e @@ -0,0 +1 @@ +No newline at end of file diff --git a/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ldiff/diff.missing_new_line1-f b/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ldiff/diff.missing_new_line1-f new file mode 100644 index 000000000..1e8a89cda --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ldiff/diff.missing_new_line1-f @@ -0,0 +1 @@ +No newline at end of file diff --git 
a/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ldiff/diff.missing_new_line2-e b/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ldiff/diff.missing_new_line2-e new file mode 100644 index 000000000..1e8a89cda --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ldiff/diff.missing_new_line2-e @@ -0,0 +1 @@ +No newline at end of file diff --git a/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ldiff/diff.missing_new_line2-f b/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ldiff/diff.missing_new_line2-f new file mode 100644 index 000000000..1e8a89cda --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ldiff/diff.missing_new_line2-f @@ -0,0 +1 @@ +No newline at end of file diff --git a/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ldiff/error.diff.chef-e b/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ldiff/error.diff.chef-e new file mode 100644 index 000000000..8ed031956 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ldiff/error.diff.chef-e @@ -0,0 +1,2 @@ +: No newline at end of file +: No newline at end of file diff --git a/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ldiff/error.diff.chef-f b/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ldiff/error.diff.chef-f new file mode 100644 index 000000000..8ed031956 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ldiff/error.diff.chef-f @@ -0,0 +1,2 @@ +: No newline at end of file +: No newline at end of file diff --git a/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ldiff/error.diff.missing_new_line1-e b/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ldiff/error.diff.missing_new_line1-e new file mode 100644 index 000000000..397dd5b92 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ldiff/error.diff.missing_new_line1-e @@ -0,0 +1 @@ +: No newline at end of file diff --git a/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ldiff/error.diff.missing_new_line1-f b/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ldiff/error.diff.missing_new_line1-f new file mode 100644 index 000000000..397dd5b92 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ldiff/error.diff.missing_new_line1-f @@ -0,0 +1 @@ +: No newline at end of file diff --git a/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ldiff/error.diff.missing_new_line2-e b/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ldiff/error.diff.missing_new_line2-e new file mode 100644 index 000000000..f9493ef94 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ldiff/error.diff.missing_new_line2-e @@ -0,0 +1 @@ +: No newline at end of file diff --git a/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ldiff/error.diff.missing_new_line2-f b/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ldiff/error.diff.missing_new_line2-f new file mode 100644 index 000000000..f9493ef94 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ldiff/error.diff.missing_new_line2-f @@ -0,0 +1 @@ +: No newline at end of file diff --git a/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ldiff/output.diff b/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ldiff/output.diff new file mode 100644 index 000000000..fa1a34796 --- /dev/null +++ 
b/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ldiff/output.diff @@ -0,0 +1,4 @@ +1c1 +< aX +--- +> bXaX diff --git a/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ldiff/output.diff-c b/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ldiff/output.diff-c new file mode 100644 index 000000000..0e1ad9986 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ldiff/output.diff-c @@ -0,0 +1,7 @@ +*** spec/fixtures/aX 2020-06-23 11:15:32.000000000 -0400 +--- spec/fixtures/bXaX 2020-06-23 11:15:32.000000000 -0400 +*************** +*** 1 **** +! aX +--- 1 ---- +! bXaX diff --git a/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ldiff/output.diff-e b/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ldiff/output.diff-e new file mode 100644 index 000000000..13e0f7f05 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ldiff/output.diff-e @@ -0,0 +1,3 @@ +1c +bXaX +. diff --git a/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ldiff/output.diff-f b/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ldiff/output.diff-f new file mode 100644 index 000000000..77710c76e --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ldiff/output.diff-f @@ -0,0 +1,3 @@ +c1 +bXaX +. diff --git a/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ldiff/output.diff-u b/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ldiff/output.diff-u new file mode 100644 index 000000000..b84f71805 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ldiff/output.diff-u @@ -0,0 +1,5 @@ +--- spec/fixtures/aX 2020-06-23 11:15:32.000000000 -0400 ++++ spec/fixtures/bXaX 2020-06-23 11:15:32.000000000 -0400 +@@ -1 +1 @@ +-aX ++bXaX diff --git a/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ldiff/output.diff.bin1 b/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ldiff/output.diff.bin1 new file mode 100644 index 000000000..e69de29bb diff --git a/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ldiff/output.diff.bin1-c b/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ldiff/output.diff.bin1-c new file mode 100644 index 000000000..e69de29bb diff --git a/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ldiff/output.diff.bin1-e b/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ldiff/output.diff.bin1-e new file mode 100644 index 000000000..e69de29bb diff --git a/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ldiff/output.diff.bin1-f b/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ldiff/output.diff.bin1-f new file mode 100644 index 000000000..e69de29bb diff --git a/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ldiff/output.diff.bin1-u b/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ldiff/output.diff.bin1-u new file mode 100644 index 000000000..e69de29bb diff --git a/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ldiff/output.diff.bin2 b/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ldiff/output.diff.bin2 new file mode 100644 index 000000000..41b625cdf --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ldiff/output.diff.bin2 @@ -0,0 +1 @@ +Binary files spec/fixtures/file1.bin and spec/fixtures/file2.bin differ diff --git a/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ldiff/output.diff.bin2-c b/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ldiff/output.diff.bin2-c 
new file mode 100644 index 000000000..41b625cdf --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ldiff/output.diff.bin2-c @@ -0,0 +1 @@ +Binary files spec/fixtures/file1.bin and spec/fixtures/file2.bin differ diff --git a/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ldiff/output.diff.bin2-e b/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ldiff/output.diff.bin2-e new file mode 100644 index 000000000..41b625cdf --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ldiff/output.diff.bin2-e @@ -0,0 +1 @@ +Binary files spec/fixtures/file1.bin and spec/fixtures/file2.bin differ diff --git a/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ldiff/output.diff.bin2-f b/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ldiff/output.diff.bin2-f new file mode 100644 index 000000000..41b625cdf --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ldiff/output.diff.bin2-f @@ -0,0 +1 @@ +Binary files spec/fixtures/file1.bin and spec/fixtures/file2.bin differ diff --git a/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ldiff/output.diff.bin2-u b/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ldiff/output.diff.bin2-u new file mode 100644 index 000000000..41b625cdf --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ldiff/output.diff.bin2-u @@ -0,0 +1 @@ +Binary files spec/fixtures/file1.bin and spec/fixtures/file2.bin differ diff --git a/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ldiff/output.diff.chef b/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ldiff/output.diff.chef new file mode 100644 index 000000000..8b98efb1e --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ldiff/output.diff.chef @@ -0,0 +1,4 @@ +3c3 +< "description": "hi" +--- +> "description": "lo" diff --git a/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ldiff/output.diff.chef-c b/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ldiff/output.diff.chef-c new file mode 100644 index 000000000..efbfa1957 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ldiff/output.diff.chef-c @@ -0,0 +1,15 @@ +*** spec/fixtures/old-chef 2020-06-23 23:18:20.000000000 -0400 +--- spec/fixtures/new-chef 2020-06-23 23:18:20.000000000 -0400 +*************** +*** 1,4 **** + { + "name": "x", +! "description": "hi" + } +\ No newline at end of file +--- 1,4 ---- + { + "name": "x", +! "description": "lo" + } +\ No newline at end of file diff --git a/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ldiff/output.diff.chef-e b/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ldiff/output.diff.chef-e new file mode 100644 index 000000000..775d881c1 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ldiff/output.diff.chef-e @@ -0,0 +1,3 @@ +3c + "description": "lo" +. diff --git a/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ldiff/output.diff.chef-f b/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ldiff/output.diff.chef-f new file mode 100644 index 000000000..9bf1e67f0 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ldiff/output.diff.chef-f @@ -0,0 +1,3 @@ +c3 + "description": "lo" +. 
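For orientation (not part of the vendored patch): the chef fixtures above, such as output.diff.chef, can be reproduced with the library API that this patch vendors. This is a minimal sketch only; the fixture paths are illustrative and it assumes diff-lcs is on the load path.

require "diff/lcs"
require "diff/lcs/hunk"

# Read the two fixture files line by line (paths are illustrative).
old_lines = File.readlines("spec/fixtures/old-chef")
new_lines = File.readlines("spec/fixtures/new-chef")

# Build one Hunk per change group and print it in the "old-style" format,
# the same way bin/ldiff does for its default output.
file_length_difference = 0
Diff::LCS.diff(old_lines, new_lines).each do |piece|
  hunk = Diff::LCS::Hunk.new(old_lines, new_lines, piece, 0, file_length_difference)
  file_length_difference = hunk.file_length_difference
  puts hunk.diff(:old)
  # Prints roughly:
  #   3c3
  #   <   "description": "hi"
  #   ---
  #   >   "description": "lo"
end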
diff --git a/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ldiff/output.diff.chef-u b/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ldiff/output.diff.chef-u new file mode 100644 index 000000000..dbacd8898 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ldiff/output.diff.chef-u @@ -0,0 +1,9 @@ +--- spec/fixtures/old-chef 2020-06-23 23:18:20.000000000 -0400 ++++ spec/fixtures/new-chef 2020-06-23 23:18:20.000000000 -0400 +@@ -1,4 +1,4 @@ + { + "name": "x", +- "description": "hi" ++ "description": "lo" + } +\ No newline at end of file diff --git a/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ldiff/output.diff.chef2 b/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ldiff/output.diff.chef2 new file mode 100644 index 000000000..496b3dc8c --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ldiff/output.diff.chef2 @@ -0,0 +1,7 @@ +2d1 +< recipe[b::default] +14a14,17 +> recipe[o::new] +> recipe[p::new] +> recipe[q::new] +> recipe[r::new] diff --git a/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ldiff/output.diff.chef2-c b/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ldiff/output.diff.chef2-c new file mode 100644 index 000000000..8349a7a80 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ldiff/output.diff.chef2-c @@ -0,0 +1,20 @@ +*** spec/fixtures/old-chef2 2020-06-30 09:43:35.000000000 -0400 +--- spec/fixtures/new-chef2 2020-06-30 09:44:32.000000000 -0400 +*************** +*** 1,5 **** + recipe[a::default] +- recipe[b::default] + recipe[c::default] + recipe[d::default] + recipe[e::default] +--- 1,4 ---- +*************** +*** 12,14 **** +--- 11,17 ---- + recipe[l::default] + recipe[m::default] + recipe[n::default] ++ recipe[o::new] ++ recipe[p::new] ++ recipe[q::new] ++ recipe[r::new] diff --git a/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ldiff/output.diff.chef2-d b/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ldiff/output.diff.chef2-d new file mode 100644 index 000000000..ca32a4903 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ldiff/output.diff.chef2-d @@ -0,0 +1,7 @@ +d2 +a14 +recipe[o::new] +recipe[p::new] +recipe[q::new] +recipe[r::new] +. diff --git a/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ldiff/output.diff.chef2-e b/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ldiff/output.diff.chef2-e new file mode 100644 index 000000000..89f3fa07d --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ldiff/output.diff.chef2-e @@ -0,0 +1,7 @@ +14a +recipe[o::new] +recipe[p::new] +recipe[q::new] +recipe[r::new] +. +2d diff --git a/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ldiff/output.diff.chef2-f b/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ldiff/output.diff.chef2-f new file mode 100644 index 000000000..ca32a4903 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ldiff/output.diff.chef2-f @@ -0,0 +1,7 @@ +d2 +a14 +recipe[o::new] +recipe[p::new] +recipe[q::new] +recipe[r::new] +. 
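As a companion sketch (again, not part of the patch): the two-hunk chef2 fixtures above come from merging hunks whose three-line context windows overlap. The loop below mirrors the diff_lines helper used later in issues_spec.rb and assumes the same illustrative fixture paths; it prints only the hunk bodies, without the "---"/"+++" header lines.

require "diff/lcs"
require "diff/lcs/hunk"

old_lines = File.readlines("spec/fixtures/old-chef2")
new_lines = File.readlines("spec/fixtures/new-chef2")

length_difference = 0
previous = nil
out = +""

Diff::LCS.diff(old_lines, new_lines).each do |piece|
  hunk = Diff::LCS::Hunk.new(old_lines, new_lines, piece, 3, length_difference)
  length_difference = hunk.file_length_difference
  # Flush the previous hunk unless the new one merges into it
  # (i.e., their context regions overlap).
  out << "#{previous.diff(:unified)}\n" unless previous.nil? || hunk.merge(previous)
  previous = hunk
end
out << "#{previous.diff(:unified, true)}\n" if previous

print out  # two "@@ ... @@" sections, matching output.diff.chef2-u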
diff --git a/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ldiff/output.diff.chef2-u b/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ldiff/output.diff.chef2-u new file mode 100644 index 000000000..ef025c7e4 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ldiff/output.diff.chef2-u @@ -0,0 +1,16 @@ +--- spec/fixtures/old-chef2 2020-06-30 09:43:35.000000000 -0400 ++++ spec/fixtures/new-chef2 2020-06-30 09:44:32.000000000 -0400 +@@ -1,5 +1,4 @@ + recipe[a::default] +-recipe[b::default] + recipe[c::default] + recipe[d::default] + recipe[e::default] +@@ -12,3 +11,7 @@ + recipe[l::default] + recipe[m::default] + recipe[n::default] ++recipe[o::new] ++recipe[p::new] ++recipe[q::new] ++recipe[r::new] diff --git a/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ldiff/output.diff.empty.vs.four_lines b/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ldiff/output.diff.empty.vs.four_lines new file mode 100644 index 000000000..e2afc3161 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ldiff/output.diff.empty.vs.four_lines @@ -0,0 +1,5 @@ +0a1,4 +> one +> two +> three +> four diff --git a/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ldiff/output.diff.empty.vs.four_lines-c b/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ldiff/output.diff.empty.vs.four_lines-c new file mode 100644 index 000000000..be0e82748 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ldiff/output.diff.empty.vs.four_lines-c @@ -0,0 +1,9 @@ +*** spec/fixtures/empty 2025-01-31 12:14:52.856031635 +0100 +--- spec/fixtures/four_lines 2025-01-31 12:13:45.476036544 +0100 +*************** +*** 0 **** +--- 1,4 ---- ++ one ++ two ++ three ++ four diff --git a/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ldiff/output.diff.empty.vs.four_lines-e b/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ldiff/output.diff.empty.vs.four_lines-e new file mode 100644 index 000000000..f8f92feb4 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ldiff/output.diff.empty.vs.four_lines-e @@ -0,0 +1,6 @@ +0a +one +two +three +four +. diff --git a/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ldiff/output.diff.empty.vs.four_lines-f b/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ldiff/output.diff.empty.vs.four_lines-f new file mode 100644 index 000000000..f02e5a038 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ldiff/output.diff.empty.vs.four_lines-f @@ -0,0 +1,6 @@ +a0 +one +two +three +four +. 
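The empty-versus-four_lines fixtures above are exercised through the command-line tool rather than the API. A small sketch of that invocation, mirroring run_ldiff in ldiff_spec.rb, is shown below; it assumes it is run from the root of the vendored gem so that bin/ldiff and spec/fixtures/ exist, and it relies on ldiff's exit-status convention (1 when the files differ, 0 when they do not).

# Run the bundled ldiff script and capture its unified output.
out = IO.popen(
  ["ruby", "-Ilib", "bin/ldiff", "-u", "spec/fixtures/empty", "spec/fixtures/four_lines"],
  &:read
)

print out            # cf. output.diff.empty.vs.four_lines-u
puts $?.exitstatus   # => 1, because the files differ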
diff --git a/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ldiff/output.diff.empty.vs.four_lines-u b/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ldiff/output.diff.empty.vs.four_lines-u new file mode 100644 index 000000000..60bd55cca --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ldiff/output.diff.empty.vs.four_lines-u @@ -0,0 +1,7 @@ +--- spec/fixtures/empty 2025-01-31 12:14:52.856031635 +0100 ++++ spec/fixtures/four_lines 2025-01-31 12:13:45.476036544 +0100 +@@ -0,0 +1,4 @@ ++one ++two ++three ++four diff --git a/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ldiff/output.diff.four_lines.vs.empty b/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ldiff/output.diff.four_lines.vs.empty new file mode 100644 index 000000000..67d0a5849 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ldiff/output.diff.four_lines.vs.empty @@ -0,0 +1,5 @@ +1,4d0 +< one +< two +< three +< four diff --git a/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ldiff/output.diff.four_lines.vs.empty-c b/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ldiff/output.diff.four_lines.vs.empty-c new file mode 100644 index 000000000..b216344db --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ldiff/output.diff.four_lines.vs.empty-c @@ -0,0 +1,9 @@ +*** spec/fixtures/four_lines 2025-01-31 12:13:45.476036544 +0100 +--- spec/fixtures/empty 2025-01-31 12:14:52.856031635 +0100 +*************** +*** 1,4 **** +- one +- two +- three +- four +--- 0 ---- diff --git a/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ldiff/output.diff.four_lines.vs.empty-e b/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ldiff/output.diff.four_lines.vs.empty-e new file mode 100644 index 000000000..c821d7c9b --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ldiff/output.diff.four_lines.vs.empty-e @@ -0,0 +1 @@ +1,4d diff --git a/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ldiff/output.diff.four_lines.vs.empty-f b/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ldiff/output.diff.four_lines.vs.empty-f new file mode 100644 index 000000000..442bd5ab6 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ldiff/output.diff.four_lines.vs.empty-f @@ -0,0 +1 @@ +d1 4 diff --git a/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ldiff/output.diff.four_lines.vs.empty-u b/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ldiff/output.diff.four_lines.vs.empty-u new file mode 100644 index 000000000..79e6d752c --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ldiff/output.diff.four_lines.vs.empty-u @@ -0,0 +1,7 @@ +--- spec/fixtures/four_lines 2025-01-31 12:13:45.476036544 +0100 ++++ spec/fixtures/empty 2025-01-31 12:14:52.856031635 +0100 +@@ -1,4 +0,0 @@ +-one +-two +-three +-four diff --git a/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ldiff/output.diff.issue95_trailing_context b/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ldiff/output.diff.issue95_trailing_context new file mode 100644 index 000000000..4335560d0 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ldiff/output.diff.issue95_trailing_context @@ -0,0 +1,4 @@ +1c1 +< 123 +--- +> 456 diff --git a/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ldiff/output.diff.issue95_trailing_context-c 
b/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ldiff/output.diff.issue95_trailing_context-c new file mode 100644 index 000000000..4b759fa6f --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ldiff/output.diff.issue95_trailing_context-c @@ -0,0 +1,9 @@ +*** spec/fixtures/123_x 2025-01-31 17:00:17.070615716 +0100 +--- spec/fixtures/456_x 2025-01-31 16:58:26.380624827 +0100 +*************** +*** 1,2 **** +! 123 + x +--- 1,2 ---- +! 456 + x diff --git a/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ldiff/output.diff.issue95_trailing_context-e b/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ldiff/output.diff.issue95_trailing_context-e new file mode 100644 index 000000000..7a8334b3d --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ldiff/output.diff.issue95_trailing_context-e @@ -0,0 +1,3 @@ +1c +456 +. diff --git a/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ldiff/output.diff.issue95_trailing_context-f b/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ldiff/output.diff.issue95_trailing_context-f new file mode 100644 index 000000000..97223a8cb --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ldiff/output.diff.issue95_trailing_context-f @@ -0,0 +1,3 @@ +c1 +456 +. diff --git a/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ldiff/output.diff.issue95_trailing_context-u b/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ldiff/output.diff.issue95_trailing_context-u new file mode 100644 index 000000000..7fbf0e2ee --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ldiff/output.diff.issue95_trailing_context-u @@ -0,0 +1,6 @@ +--- spec/fixtures/123_x 2025-01-31 17:00:17.070615716 +0100 ++++ spec/fixtures/456_x 2025-01-31 16:58:26.380624827 +0100 +@@ -1,2 +1,2 @@ +-123 ++456 + x diff --git a/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ldiff/output.diff.missing_new_line1 b/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ldiff/output.diff.missing_new_line1 new file mode 100644 index 000000000..c5cb113b8 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ldiff/output.diff.missing_new_line1 @@ -0,0 +1,5 @@ +4c4 +< four +--- +> four +\ No newline at end of file diff --git a/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ldiff/output.diff.missing_new_line1-c b/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ldiff/output.diff.missing_new_line1-c new file mode 100644 index 000000000..55d1ade53 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ldiff/output.diff.missing_new_line1-c @@ -0,0 +1,14 @@ +*** spec/fixtures/four_lines 2025-01-31 12:17:43.926013315 +0100 +--- spec/fixtures/four_lines_with_missing_new_line 2025-01-31 12:17:43.926013315 +0100 +*************** +*** 1,4 **** + one + two + three +! four +--- 1,4 ---- + one + two + three +! 
four +\ No newline at end of file diff --git a/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ldiff/output.diff.missing_new_line1-e b/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ldiff/output.diff.missing_new_line1-e new file mode 100644 index 000000000..e69de29bb diff --git a/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ldiff/output.diff.missing_new_line1-f b/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ldiff/output.diff.missing_new_line1-f new file mode 100644 index 000000000..e69de29bb diff --git a/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ldiff/output.diff.missing_new_line1-u b/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ldiff/output.diff.missing_new_line1-u new file mode 100644 index 000000000..010518bf8 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ldiff/output.diff.missing_new_line1-u @@ -0,0 +1,9 @@ +--- spec/fixtures/four_lines 2025-01-31 12:17:43.926013315 +0100 ++++ spec/fixtures/four_lines_with_missing_new_line 2025-01-31 12:17:43.926013315 +0100 +@@ -1,4 +1,4 @@ + one + two + three +-four ++four +\ No newline at end of file diff --git a/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ldiff/output.diff.missing_new_line2 b/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ldiff/output.diff.missing_new_line2 new file mode 100644 index 000000000..10e432674 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ldiff/output.diff.missing_new_line2 @@ -0,0 +1,5 @@ +4c4 +< four +\ No newline at end of file +--- +> four diff --git a/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ldiff/output.diff.missing_new_line2-c b/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ldiff/output.diff.missing_new_line2-c new file mode 100644 index 000000000..b43103059 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ldiff/output.diff.missing_new_line2-c @@ -0,0 +1,14 @@ +*** spec/fixtures/four_lines_with_missing_new_line 2025-01-31 12:17:43.926013315 +0100 +--- spec/fixtures/four_lines 2025-01-31 12:17:43.926013315 +0100 +*************** +*** 1,4 **** + one + two + three +! four +\ No newline at end of file +--- 1,4 ---- + one + two + three +! 
four diff --git a/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ldiff/output.diff.missing_new_line2-e b/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ldiff/output.diff.missing_new_line2-e new file mode 100644 index 000000000..e69de29bb diff --git a/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ldiff/output.diff.missing_new_line2-f b/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ldiff/output.diff.missing_new_line2-f new file mode 100644 index 000000000..e69de29bb diff --git a/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ldiff/output.diff.missing_new_line2-u b/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ldiff/output.diff.missing_new_line2-u new file mode 100644 index 000000000..2481a9e08 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/ldiff/output.diff.missing_new_line2-u @@ -0,0 +1,9 @@ +--- spec/fixtures/four_lines_with_missing_new_line 2025-01-31 12:17:43.926013315 +0100 ++++ spec/fixtures/four_lines 2025-01-31 12:17:43.926013315 +0100 +@@ -1,4 +1,4 @@ + one + two + three +-four +\ No newline at end of file ++four diff --git a/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/new-chef b/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/new-chef new file mode 100644 index 000000000..d7babfec1 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/new-chef @@ -0,0 +1,4 @@ +{ + "name": "x", + "description": "lo" +} \ No newline at end of file diff --git a/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/new-chef2 b/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/new-chef2 new file mode 100644 index 000000000..8213c73c1 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/new-chef2 @@ -0,0 +1,17 @@ +recipe[a::default] +recipe[c::default] +recipe[d::default] +recipe[e::default] +recipe[f::default] +recipe[g::default] +recipe[h::default] +recipe[i::default] +recipe[j::default] +recipe[k::default] +recipe[l::default] +recipe[m::default] +recipe[n::default] +recipe[o::new] +recipe[p::new] +recipe[q::new] +recipe[r::new] diff --git a/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/old-chef b/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/old-chef new file mode 100644 index 000000000..5f9e38b84 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/old-chef @@ -0,0 +1,4 @@ +{ + "name": "x", + "description": "hi" +} \ No newline at end of file diff --git a/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/old-chef2 b/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/old-chef2 new file mode 100644 index 000000000..4a23407fd --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/fixtures/old-chef2 @@ -0,0 +1,14 @@ +recipe[a::default] +recipe[b::default] +recipe[c::default] +recipe[d::default] +recipe[e::default] +recipe[f::default] +recipe[g::default] +recipe[h::default] +recipe[i::default] +recipe[j::default] +recipe[k::default] +recipe[l::default] +recipe[m::default] +recipe[n::default] diff --git a/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/hunk_spec.rb b/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/hunk_spec.rb new file mode 100644 index 000000000..7d910399f --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/hunk_spec.rb @@ -0,0 +1,83 @@ +# frozen_string_literal: true + +require "spec_helper" + +if String.method_defined?(:encoding) + require "diff/lcs/hunk" + + describe Diff::LCS::Hunk do + 
let(:old_data) { ["Tu a un carté avec {count} itéms".encode("UTF-16LE")] } + let(:new_data) { ["Tu a un carte avec {count} items".encode("UTF-16LE")] } + let(:pieces) { Diff::LCS.diff old_data, new_data } + let(:hunk) { Diff::LCS::Hunk.new(old_data, new_data, pieces[0], 3, 0) } + + it "produces a unified diff from the two pieces" do + expected = <<-EXPECTED.gsub(/^\s+/, "").encode("UTF-16LE").chomp + @@ -1 +1 @@ + -Tu a un carté avec {count} itéms + +Tu a un carte avec {count} items + EXPECTED + + expect(hunk.diff(:unified)).to eq(expected) + end + + it "produces a unified diff from the two pieces (last entry)" do + expected = <<-EXPECTED.gsub(/^\s+/, "").encode("UTF-16LE").chomp + @@ -1 +1 @@ + -Tu a un carté avec {count} itéms + +Tu a un carte avec {count} items + \\ No newline at end of file + EXPECTED + + expect(hunk.diff(:unified, true)).to eq(expected) + end + + it "produces a context diff from the two pieces" do + expected = <<-EXPECTED.gsub(/^\s+/, "").encode("UTF-16LE").chomp + *************** + *** 1 **** + ! Tu a un carté avec {count} itéms + --- 1 ---- + ! Tu a un carte avec {count} items + EXPECTED + + expect(hunk.diff(:context)).to eq(expected) + end + + it "produces an old diff from the two pieces" do + expected = <<-EXPECTED.gsub(/^ +/, "").encode("UTF-16LE").chomp + 1c1 + < Tu a un carté avec {count} itéms + --- + > Tu a un carte avec {count} items + + EXPECTED + + expect(hunk.diff(:old)).to eq(expected) + end + + it "produces a reverse ed diff from the two pieces" do + expected = <<-EXPECTED.gsub(/^ +/, "").encode("UTF-16LE").chomp + c1 + Tu a un carte avec {count} items + . + + EXPECTED + + expect(hunk.diff(:reverse_ed)).to eq(expected) + end + + context "with empty first data set" do + let(:old_data) { [] } + + it "produces a unified diff" do + expected = <<-EXPECTED.gsub(/^\s+/, "").encode("UTF-16LE").chomp + @@ -0,0 +1 @@ + +Tu a un carte avec {count} items + EXPECTED + + expect(hunk.diff(:unified)).to eq(expected) + end + end + end +end diff --git a/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/issues_spec.rb b/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/issues_spec.rb new file mode 100644 index 000000000..5b0fb2a1c --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/issues_spec.rb @@ -0,0 +1,160 @@ +# frozen_string_literal: true + +require "spec_helper" +require "diff/lcs/hunk" + +describe "Diff::LCS Issues" do + include Diff::LCS::SpecHelper::Matchers + + describe "issue #1" do + shared_examples "handles simple diffs" do |s1, s2, forward_diff| + before do + @diff_s1_s2 = Diff::LCS.diff(s1, s2) + end + + it "creates the correct diff" do + expect(change_diff(forward_diff)).to eq(@diff_s1_s2) + end + + it "creates the correct patch s1->s2" do + expect(Diff::LCS.patch(s1, @diff_s1_s2)).to eq(s2) + end + + it "creates the correct patch s2->s1" do + expect(Diff::LCS.patch(s2, @diff_s1_s2)).to eq(s1) + end + end + + describe "string" do + it_has_behavior "handles simple diffs", "aX", "bXaX", [ + [ + ["+", 0, "b"], + ["+", 1, "X"] + ] + ] + it_has_behavior "handles simple diffs", "bXaX", "aX", [ + [ + ["-", 0, "b"], + ["-", 1, "X"] + ] + ] + end + + describe "array" do + it_has_behavior "handles simple diffs", %w[a X], %w[b X a X], [ + [ + ["+", 0, "b"], + ["+", 1, "X"] + ] + ] + it_has_behavior "handles simple diffs", %w[b X a X], %w[a X], [ + [ + ["-", 0, "b"], + ["-", 1, "X"] + ] + ] + end + end + + describe "issue #57" do + it "should fail with a correct error" do + # standard:disable Style/HashSyntax + expect { + actual = {:category => 
"app.rack.request"} + expected = {:category => "rack.middleware", :title => "Anonymous Middleware"} + expect(actual).to eq(expected) + }.to raise_error(RSpec::Expectations::ExpectationNotMetError) + # standard:enable Style/HashSyntax + end + end + + describe "issue #65" do + def diff_lines(old_lines, new_lines) + file_length_difference = 0 + previous_hunk = nil + output = [] + + Diff::LCS.diff(old_lines, new_lines).each do |piece| + hunk = Diff::LCS::Hunk.new(old_lines, new_lines, piece, 3, file_length_difference) + file_length_difference = hunk.file_length_difference + maybe_contiguous_hunks = previous_hunk.nil? || hunk.merge(previous_hunk) + + output << "#{previous_hunk.diff(:unified)}\n" unless maybe_contiguous_hunks + + previous_hunk = hunk + end + output << "#{previous_hunk.diff(:unified, true)}\n" unless previous_hunk.nil? + output.join + end + + it "should not misplace the new chunk" do + old_data = [ + "recipe[a::default]", "recipe[b::default]", "recipe[c::default]", + "recipe[d::default]", "recipe[e::default]", "recipe[f::default]", + "recipe[g::default]", "recipe[h::default]", "recipe[i::default]", + "recipe[j::default]", "recipe[k::default]", "recipe[l::default]", + "recipe[m::default]", "recipe[n::default]" + ] + + new_data = [ + "recipe[a::default]", "recipe[c::default]", "recipe[d::default]", + "recipe[e::default]", "recipe[f::default]", "recipe[g::default]", + "recipe[h::default]", "recipe[i::default]", "recipe[j::default]", + "recipe[k::default]", "recipe[l::default]", "recipe[m::default]", + "recipe[n::default]", "recipe[o::new]", "recipe[p::new]", + "recipe[q::new]", "recipe[r::new]" + ] + + # standard:disable Layout/HeredocIndentation + expect(diff_lines(old_data, new_data)).to eq(<<-EODIFF) +@@ -1,5 +1,4 @@ + recipe[a::default] +-recipe[b::default] + recipe[c::default] + recipe[d::default] + recipe[e::default] +@@ -12,3 +11,7 @@ + recipe[l::default] + recipe[m::default] + recipe[n::default] ++recipe[o::new] ++recipe[p::new] ++recipe[q::new] ++recipe[r::new] + EODIFF + # standard:enable Layout/HeredocIndentation + end + end + + describe "issue #107 (replaces issue #60)" do + it "should produce unified output with correct context" do + # standard:disable Layout/HeredocIndentation + old_data = <<-DATA_OLD.strip.split("\n").map(&:chomp) +{ + "name": "x", + "description": "hi" +} + DATA_OLD + + new_data = <<-DATA_NEW.strip.split("\n").map(&:chomp) +{ + "name": "x", + "description": "lo" +} + DATA_NEW + + diff = ::Diff::LCS.diff(old_data, new_data) + hunk = ::Diff::LCS::Hunk.new(old_data, new_data, diff.first, 3, 0) + + expect(hunk.diff(:unified)).to eq(<<-EXPECTED.chomp) +@@ -1,4 +1,4 @@ + { + "name": "x", +- "description": "hi" ++ "description": "lo" + } + EXPECTED + # standard:enable Layout/HeredocIndentation + end + end +end diff --git a/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/lcs_spec.rb b/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/lcs_spec.rb new file mode 100644 index 000000000..c17f22f69 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/lcs_spec.rb @@ -0,0 +1,56 @@ +# frozen_string_literal: true + +require "spec_helper" + +describe Diff::LCS::Internals, ".lcs" do + include Diff::LCS::SpecHelper::Matchers + + it "returns a meaningful LCS array with (seq1, seq2)" do + res = Diff::LCS::Internals.lcs(seq1, seq2) + # The result of the LCS (less the +nil+ values) must be as long as the + # correct result. 
+ expect(res.compact.size).to eq(correct_lcs.size) + expect(res).to correctly_map_sequence(seq1).to_other_sequence(seq2) + + # Compact these transformations and they should be the correct LCS. + x_seq1 = (0...res.size).map { |ix| res[ix] ? seq1[ix] : nil }.compact + x_seq2 = (0...res.size).map { |ix| res[ix] ? seq2[res[ix]] : nil }.compact + + expect(x_seq1).to eq(correct_lcs) + expect(x_seq2).to eq(correct_lcs) + end + + it "returns all indexes with (hello, hello)" do + expect(Diff::LCS::Internals.lcs(hello, hello)).to \ + eq((0...hello.size).to_a) + end + + it "returns all indexes with (hello_ary, hello_ary)" do + expect(Diff::LCS::Internals.lcs(hello_ary, hello_ary)).to \ + eq((0...hello_ary.size).to_a) + end +end + +describe Diff::LCS, ".LCS" do + include Diff::LCS::SpecHelper::Matchers + + it "returns the correct compacted values from Diff::LCS.LCS" do + res = Diff::LCS.LCS(seq1, seq2) + expect(res).to eq(correct_lcs) + expect(res.compact).to eq(res) + end + + it "is transitive" do + res = Diff::LCS.LCS(seq2, seq1) + expect(res).to eq(correct_lcs) + expect(res.compact).to eq(res) + end + + it "returns %W(h e l l o) with (hello, hello)" do + expect(Diff::LCS.LCS(hello, hello)).to eq(hello.chars) + end + + it "returns hello_ary with (hello_ary, hello_ary)" do + expect(Diff::LCS.LCS(hello_ary, hello_ary)).to eq(hello_ary) + end +end diff --git a/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/ldiff_spec.rb b/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/ldiff_spec.rb new file mode 100644 index 000000000..e13b56149 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/ldiff_spec.rb @@ -0,0 +1,100 @@ +# frozen_string_literal: true + +require "spec_helper" + +RSpec.describe "bin/ldiff" do + include CaptureSubprocessIO + + # standard:disable Style/HashSyntax + fixtures = [ + {:name => "diff", :left => "aX", :right => "bXaX", :diff => 1}, + {:name => "diff.missing_new_line1", :left => "four_lines", :right => "four_lines_with_missing_new_line", :diff => 1}, + {:name => "diff.missing_new_line2", :left => "four_lines_with_missing_new_line", :right => "four_lines", :diff => 1}, + {:name => "diff.issue95_trailing_context", :left => "123_x", :right => "456_x", :diff => 1}, + {:name => "diff.four_lines.vs.empty", :left => "four_lines", :right => "empty", :diff => 1}, + {:name => "diff.empty.vs.four_lines", :left => "empty", :right => "four_lines", :diff => 1}, + {:name => "diff.bin1", :left => "file1.bin", :right => "file1.bin", :diff => 0}, + {:name => "diff.bin2", :left => "file1.bin", :right => "file2.bin", :diff => 1}, + {:name => "diff.chef", :left => "old-chef", :right => "new-chef", :diff => 1}, + {:name => "diff.chef2", :left => "old-chef2", :right => "new-chef2", :diff => 1} + ].product([nil, "-e", "-f", "-c", "-u"]).map { |(fixture, flag)| + fixture = fixture.dup + fixture[:flag] = flag + fixture + } + # standard:enable Style/HashSyntax + + def self.test_ldiff(fixture) + desc = [ + fixture[:flag], + "spec/fixtures/#{fixture[:left]}", + "spec/fixtures/#{fixture[:right]}", + "#", + "=>", + "spec/fixtures/ldiff/output.#{fixture[:name]}#{fixture[:flag]}" + ].join(" ") + + it desc do + stdout, stderr, status = run_ldiff(fixture) + expect(status).to eq(fixture[:diff]) + expect(stderr).to eq(read_fixture(fixture, mode: "error", allow_missing: true)) + expect(stdout).to eq(read_fixture(fixture, mode: "output", allow_missing: false)) + end + end + + fixtures.each do |fixture| + test_ldiff(fixture) + end + + def read_fixture(options, mode: "output", allow_missing: false) + 
fixture = options.fetch(:name) + flag = options.fetch(:flag) + name = "spec/fixtures/ldiff/#{mode}.#{fixture}#{flag}" + + return "" if !::File.exist?(name) && allow_missing + + data = IO.__send__(IO.respond_to?(:binread) ? :binread : :read, name) + clean_data(data, flag) + end + + def clean_data(data, flag) + data = + case flag + when "-c", "-u" + clean_output_timestamp(data) + else + data + end + data.gsub(/\r\n?/, "\n") + end + + def clean_output_timestamp(data) + data.gsub( + %r{ + ^ + [-+*]{3} + \s* + spec/fixtures/(\S+) + \s* + \d{4}-\d\d-\d\d + \s* + \d\d:\d\d:\d\d(?:\.\d+) + \s* + (?:[-+]\d{4}|Z) + }x, + '*** spec/fixtures/\1 0000-00-00 :00 =>:00 =>00.000000000 -0000' + ) + end + + def run_ldiff(options) + flag = options.fetch(:flag) + left = options.fetch(:left) + right = options.fetch(:right) + + stdout, stderr = capture_subprocess_io do + system("ruby -Ilib bin/ldiff #{flag} spec/fixtures/#{left} spec/fixtures/#{right}") + end + + [clean_data(stdout, flag), stderr, $?.exitstatus] + end +end diff --git a/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/patch_spec.rb b/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/patch_spec.rb new file mode 100644 index 000000000..8fc3ee25f --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/patch_spec.rb @@ -0,0 +1,416 @@ +# frozen_string_literal: true + +require "spec_helper" + +describe "Diff::LCS.patch" do + include Diff::LCS::SpecHelper::Matchers + + shared_examples "patch sequences correctly" do + it "correctly patches left-to-right (patch autodiscovery)" do + expect(Diff::LCS.patch(s1, patch_set)).to eq(s2) + end + + it "correctly patches left-to-right (explicit patch)" do + expect(Diff::LCS.patch(s1, patch_set, :patch)).to eq(s2) + expect(Diff::LCS.patch!(s1, patch_set)).to eq(s2) + end + + it "correctly patches right-to-left (unpatch autodiscovery)" do + expect(Diff::LCS.patch(s2, patch_set)).to eq(s1) + end + + it "correctly patches right-to-left (explicit unpatch)" do + expect(Diff::LCS.patch(s2, patch_set, :unpatch)).to eq(s1) + expect(Diff::LCS.unpatch!(s2, patch_set)).to eq(s1) + end + end + + describe "using a Diff::LCS.diff patchset" do + describe "an empty patchset returns the source" do + it "works on a string (hello)" do + diff = Diff::LCS.diff(hello, hello) + expect(Diff::LCS.patch(hello, diff)).to eq(hello) + end + + it "works on an array %W(h e l l o)" do + diff = Diff::LCS.diff(hello_ary, hello_ary) + expect(Diff::LCS.patch(hello_ary, diff)).to eq(hello_ary) + end + end + + describe "with default diff callbacks (DiffCallbacks)" do + describe "forward (s1 -> s2)" do + it_has_behavior "patch sequences correctly" do + let(:s1) { seq1 } + let(:s2) { seq2 } + let(:patch_set) { Diff::LCS.diff(seq1, seq2) } + end + end + + describe "reverse (s2 -> s1)" do + it_has_behavior "patch sequences correctly" do + let(:s1) { seq2 } + let(:s2) { seq1 } + let(:patch_set) { Diff::LCS.diff(seq2, seq1) } + end + end + end + + describe "with context diff callbacks (ContextDiffCallbacks)" do + describe "forward (s1 -> s2)" do + it_has_behavior "patch sequences correctly" do + let(:s1) { seq1 } + let(:s2) { seq2 } + let(:patch_set) { + Diff::LCS.diff(seq1, seq2, Diff::LCS::ContextDiffCallbacks) + } + end + end + + describe "reverse (s2 -> s1)" do + it_has_behavior "patch sequences correctly" do + let(:s1) { seq2 } + let(:s2) { seq1 } + let(:patch_set) { + Diff::LCS.diff(seq2, seq1, Diff::LCS::ContextDiffCallbacks) + } + end + end + end + + describe "with sdiff callbacks (SDiffCallbacks)" do + describe "forward (s1 -> s2)" do 
+ it_has_behavior "patch sequences correctly" do + let(:s1) { seq1 } + let(:s2) { seq2 } + let(:patch_set) { + Diff::LCS.diff(seq1, seq2, Diff::LCS::SDiffCallbacks) + } + end + end + + describe "reverse (s2 -> s1)" do + it_has_behavior "patch sequences correctly" do + let(:s1) { seq2 } + let(:s2) { seq1 } + let(:patch_set) { + Diff::LCS.diff(seq2, seq1, Diff::LCS::SDiffCallbacks) + } + end + end + end + end + + describe "using a Diff::LCS.sdiff patchset" do + describe "an empty patchset returns the source" do + it "works on a string (hello)" do + expect(Diff::LCS.patch(hello, Diff::LCS.sdiff(hello, hello))).to eq(hello) + end + + it "works on an array %W(h e l l o)" do + expect(Diff::LCS.patch(hello_ary, Diff::LCS.sdiff(hello_ary, hello_ary))).to eq(hello_ary) + end + end + + describe "with default diff callbacks (DiffCallbacks)" do + describe "forward (s1 -> s2)" do + it_has_behavior "patch sequences correctly" do + let(:s1) { seq1 } + let(:s2) { seq2 } + let(:patch_set) { + Diff::LCS.sdiff(seq1, seq2, Diff::LCS::DiffCallbacks) + } + end + end + + describe "reverse (s2 -> s1)" do + it_has_behavior "patch sequences correctly" do + let(:s1) { seq2 } + let(:s2) { seq1 } + let(:patch_set) { + Diff::LCS.sdiff(seq2, seq1, Diff::LCS::DiffCallbacks) + } + end + end + end + + describe "with context diff callbacks (DiffCallbacks)" do + describe "forward (s1 -> s2)" do + it_has_behavior "patch sequences correctly" do + let(:s1) { seq1 } + let(:s2) { seq2 } + let(:patch_set) { + Diff::LCS.sdiff(seq1, seq2, Diff::LCS::ContextDiffCallbacks) + } + end + end + + describe "reverse (s2 -> s1)" do + it_has_behavior "patch sequences correctly" do + let(:s1) { seq2 } + let(:s2) { seq1 } + let(:patch_set) { + Diff::LCS.sdiff(seq2, seq1, Diff::LCS::ContextDiffCallbacks) + } + end + end + end + + describe "with sdiff callbacks (SDiffCallbacks)" do + describe "forward (s1 -> s2)" do + it_has_behavior "patch sequences correctly" do + let(:s1) { seq1 } + let(:s2) { seq2 } + let(:patch_set) { Diff::LCS.sdiff(seq1, seq2) } + end + end + + describe "reverse (s2 -> s1)" do + it_has_behavior "patch sequences correctly" do + let(:s1) { seq2 } + let(:s2) { seq1 } + let(:patch_set) { Diff::LCS.sdiff(seq2, seq1) } + end + end + end + end + + # Note: because of the error in autodiscovery ("does not autodiscover s1 + # to s2 patches"), this cannot use the "patch sequences correctly" shared + # set. Once the bug in autodiscovery is fixed, this can be converted as + # above. 
+ describe "fix bug 891: patchsets do not contain the last equal part" do + before :each do + @s1 = %w[a b c d e f g h i j k] # standard:disable Layout/SpaceInsideArrayPercentLiteral + @s2 = %w[a b c d D e f g h i j k] + end + + describe "using Diff::LCS.diff with default diff callbacks" do + before :each do + @patch_set_s1_s2 = Diff::LCS.diff(@s1, @s2) + @patch_set_s2_s1 = Diff::LCS.diff(@s2, @s1) + end + + it "autodiscovers s1 to s2 patches" do + expect do + expect(Diff::LCS.patch(@s1, @patch_set_s1_s2)).to eq(@s2) + end.to_not raise_error + end + + it "autodiscovers s2 to s1 patches" do + expect do + expect(Diff::LCS.patch(@s1, @patch_set_s2_s1)).to eq(@s2) + end.to_not raise_error + end + + it "autodiscovers s2 to s1 the left-to-right patches" do + expect(Diff::LCS.patch(@s2, @patch_set_s2_s1)).to eq(@s1) + expect(Diff::LCS.patch(@s2, @patch_set_s1_s2)).to eq(@s1) + end + + it "correctly patches left-to-right (explicit patch)" do + expect(Diff::LCS.patch(@s1, @patch_set_s1_s2, :patch)).to eq(@s2) + expect(Diff::LCS.patch(@s2, @patch_set_s2_s1, :patch)).to eq(@s1) + expect(Diff::LCS.patch!(@s1, @patch_set_s1_s2)).to eq(@s2) + expect(Diff::LCS.patch!(@s2, @patch_set_s2_s1)).to eq(@s1) + end + + it "correctly patches right-to-left (explicit unpatch)" do + expect(Diff::LCS.patch(@s2, @patch_set_s1_s2, :unpatch)).to eq(@s1) + expect(Diff::LCS.patch(@s1, @patch_set_s2_s1, :unpatch)).to eq(@s2) + expect(Diff::LCS.unpatch!(@s2, @patch_set_s1_s2)).to eq(@s1) + expect(Diff::LCS.unpatch!(@s1, @patch_set_s2_s1)).to eq(@s2) + end + end + + describe "using Diff::LCS.diff with context diff callbacks" do + before :each do + @patch_set_s1_s2 = Diff::LCS.diff(@s1, @s2, Diff::LCS::ContextDiffCallbacks) + @patch_set_s2_s1 = Diff::LCS.diff(@s2, @s1, Diff::LCS::ContextDiffCallbacks) + end + + it "autodiscovers s1 to s2 patches" do + expect do + expect(Diff::LCS.patch(@s1, @patch_set_s1_s2)).to eq(@s2) + end.to_not raise_error + end + + it "autodiscovers s2 to s1 patches" do + expect do + expect(Diff::LCS.patch(@s1, @patch_set_s2_s1)).to eq(@s2) + end.to_not raise_error + end + + it "autodiscovers s2 to s1 the left-to-right patches" do + expect(Diff::LCS.patch(@s2, @patch_set_s2_s1)).to eq(@s1) + expect(Diff::LCS.patch(@s2, @patch_set_s1_s2)).to eq(@s1) + end + + it "correctly patches left-to-right (explicit patch)" do + expect(Diff::LCS.patch(@s1, @patch_set_s1_s2, :patch)).to eq(@s2) + expect(Diff::LCS.patch(@s2, @patch_set_s2_s1, :patch)).to eq(@s1) + expect(Diff::LCS.patch!(@s1, @patch_set_s1_s2)).to eq(@s2) + expect(Diff::LCS.patch!(@s2, @patch_set_s2_s1)).to eq(@s1) + end + + it "correctly patches right-to-left (explicit unpatch)" do + expect(Diff::LCS.patch(@s2, @patch_set_s1_s2, :unpatch)).to eq(@s1) + expect(Diff::LCS.patch(@s1, @patch_set_s2_s1, :unpatch)).to eq(@s2) + expect(Diff::LCS.unpatch!(@s2, @patch_set_s1_s2)).to eq(@s1) + expect(Diff::LCS.unpatch!(@s1, @patch_set_s2_s1)).to eq(@s2) + end + end + + describe "using Diff::LCS.diff with sdiff callbacks" do + before(:each) do + @patch_set_s1_s2 = Diff::LCS.diff(@s1, @s2, Diff::LCS::SDiffCallbacks) + @patch_set_s2_s1 = Diff::LCS.diff(@s2, @s1, Diff::LCS::SDiffCallbacks) + end + + it "autodiscovers s1 to s2 patches" do + expect do + expect(Diff::LCS.patch(@s1, @patch_set_s1_s2)).to eq(@s2) + end.to_not raise_error + end + + it "autodiscovers s2 to s1 patches" do + expect do + expect(Diff::LCS.patch(@s1, @patch_set_s2_s1)).to eq(@s2) + end.to_not raise_error + end + + it "autodiscovers s2 to s1 the left-to-right patches" do + 
expect(Diff::LCS.patch(@s2, @patch_set_s2_s1)).to eq(@s1) + expect(Diff::LCS.patch(@s2, @patch_set_s1_s2)).to eq(@s1) + end + + it "correctly patches left-to-right (explicit patch)" do + expect(Diff::LCS.patch(@s1, @patch_set_s1_s2, :patch)).to eq(@s2) + expect(Diff::LCS.patch(@s2, @patch_set_s2_s1, :patch)).to eq(@s1) + expect(Diff::LCS.patch!(@s1, @patch_set_s1_s2)).to eq(@s2) + expect(Diff::LCS.patch!(@s2, @patch_set_s2_s1)).to eq(@s1) + end + + it "correctly patches right-to-left (explicit unpatch)" do + expect(Diff::LCS.patch(@s2, @patch_set_s1_s2, :unpatch)).to eq(@s1) + expect(Diff::LCS.patch(@s1, @patch_set_s2_s1, :unpatch)).to eq(@s2) + expect(Diff::LCS.unpatch!(@s2, @patch_set_s1_s2)).to eq(@s1) + expect(Diff::LCS.unpatch!(@s1, @patch_set_s2_s1)).to eq(@s2) + end + end + + describe "using Diff::LCS.sdiff with default sdiff callbacks" do + before(:each) do + @patch_set_s1_s2 = Diff::LCS.sdiff(@s1, @s2) + @patch_set_s2_s1 = Diff::LCS.sdiff(@s2, @s1) + end + + it "autodiscovers s1 to s2 patches" do + expect do + expect(Diff::LCS.patch(@s1, @patch_set_s1_s2)).to eq(@s2) + end.to_not raise_error + end + + it "autodiscovers s2 to s1 patches" do + expect do + expect(Diff::LCS.patch(@s1, @patch_set_s2_s1)).to eq(@s2) + end.to_not raise_error + end + + it "autodiscovers s2 to s1 the left-to-right patches" do + expect(Diff::LCS.patch(@s2, @patch_set_s2_s1)).to eq(@s1) + expect(Diff::LCS.patch(@s2, @patch_set_s1_s2)).to eq(@s1) + end + + it "correctly patches left-to-right (explicit patch)" do + expect(Diff::LCS.patch(@s1, @patch_set_s1_s2, :patch)).to eq(@s2) + expect(Diff::LCS.patch(@s2, @patch_set_s2_s1, :patch)).to eq(@s1) + expect(Diff::LCS.patch!(@s1, @patch_set_s1_s2)).to eq(@s2) + expect(Diff::LCS.patch!(@s2, @patch_set_s2_s1)).to eq(@s1) + end + + it "correctly patches right-to-left (explicit unpatch)" do + expect(Diff::LCS.patch(@s2, @patch_set_s1_s2, :unpatch)).to eq(@s1) + expect(Diff::LCS.patch(@s1, @patch_set_s2_s1, :unpatch)).to eq(@s2) + expect(Diff::LCS.unpatch!(@s2, @patch_set_s1_s2)).to eq(@s1) + expect(Diff::LCS.unpatch!(@s1, @patch_set_s2_s1)).to eq(@s2) + end + end + + describe "using Diff::LCS.sdiff with context diff callbacks" do + before(:each) do + @patch_set_s1_s2 = Diff::LCS.sdiff(@s1, @s2, Diff::LCS::ContextDiffCallbacks) + @patch_set_s2_s1 = Diff::LCS.sdiff(@s2, @s1, Diff::LCS::ContextDiffCallbacks) + end + + it "autodiscovers s1 to s2 patches" do + expect do + expect(Diff::LCS.patch(@s1, @patch_set_s1_s2)).to eq(@s2) + end.to_not raise_error + end + + it "autodiscovers s2 to s1 patches" do + expect do + expect(Diff::LCS.patch(@s1, @patch_set_s2_s1)).to eq(@s2) + end.to_not raise_error + end + + it "autodiscovers s2 to s1 the left-to-right patches" do + expect(Diff::LCS.patch(@s2, @patch_set_s2_s1)).to eq(@s1) + expect(Diff::LCS.patch(@s2, @patch_set_s1_s2)).to eq(@s1) + end + + it "correctly patches left-to-right (explicit patch)" do + expect(Diff::LCS.patch(@s1, @patch_set_s1_s2, :patch)).to eq(@s2) + expect(Diff::LCS.patch(@s2, @patch_set_s2_s1, :patch)).to eq(@s1) + expect(Diff::LCS.patch!(@s1, @patch_set_s1_s2)).to eq(@s2) + expect(Diff::LCS.patch!(@s2, @patch_set_s2_s1)).to eq(@s1) + end + + it "correctly patches right-to-left (explicit unpatch)" do + expect(Diff::LCS.patch(@s2, @patch_set_s1_s2, :unpatch)).to eq(@s1) + expect(Diff::LCS.patch(@s1, @patch_set_s2_s1, :unpatch)).to eq(@s2) + expect(Diff::LCS.unpatch!(@s2, @patch_set_s1_s2)).to eq(@s1) + expect(Diff::LCS.unpatch!(@s1, @patch_set_s2_s1)).to eq(@s2) + end + end + + describe "using 
Diff::LCS.sdiff with default diff callbacks" do + before(:each) do + @patch_set_s1_s2 = Diff::LCS.sdiff(@s1, @s2, Diff::LCS::DiffCallbacks) + @patch_set_s2_s1 = Diff::LCS.sdiff(@s2, @s1, Diff::LCS::DiffCallbacks) + end + + it "autodiscovers s1 to s2 patches" do + expect do + expect(Diff::LCS.patch(@s1, @patch_set_s1_s2)).to eq(@s2) + end.to_not raise_error + end + + it "autodiscovers s2 to s1 patches" do + expect do + expect(Diff::LCS.patch(@s1, @patch_set_s2_s1)).to eq(@s2) + end.to_not raise_error + end + + it "autodiscovers s2 to s1 the left-to-right patches" do + expect(Diff::LCS.patch(@s2, @patch_set_s2_s1)).to eq(@s1) + expect(Diff::LCS.patch(@s2, @patch_set_s1_s2)).to eq(@s1) + end + + it "correctly patches left-to-right (explicit patch)" do + expect(Diff::LCS.patch(@s1, @patch_set_s1_s2, :patch)).to eq(@s2) + expect(Diff::LCS.patch(@s2, @patch_set_s2_s1, :patch)).to eq(@s1) + expect(Diff::LCS.patch!(@s1, @patch_set_s1_s2)).to eq(@s2) + expect(Diff::LCS.patch!(@s2, @patch_set_s2_s1)).to eq(@s1) + end + + it "correctly patches right-to-left (explicit unpatch)" do + expect(Diff::LCS.patch(@s2, @patch_set_s1_s2, :unpatch)).to eq(@s1) + expect(Diff::LCS.patch(@s1, @patch_set_s2_s1, :unpatch)).to eq(@s2) + expect(Diff::LCS.unpatch!(@s2, @patch_set_s1_s2)).to eq(@s1) + expect(Diff::LCS.unpatch!(@s1, @patch_set_s2_s1)).to eq(@s2) + end + end + end +end diff --git a/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/sdiff_spec.rb b/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/sdiff_spec.rb new file mode 100644 index 000000000..aded30176 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/sdiff_spec.rb @@ -0,0 +1,216 @@ +# frozen_string_literal: true + +require "spec_helper" + +describe "Diff::LCS.sdiff" do + include Diff::LCS::SpecHelper::Matchers + + shared_examples "compare sequences correctly" do + it "compares s1 -> s2 correctly" do + expect(Diff::LCS.sdiff(s1, s2)).to eq(context_diff(result)) + end + + it "compares s2 -> s1 correctly" do + expect(Diff::LCS.sdiff(s2, s1)).to eq(context_diff(reverse_sdiff(result))) + end + end + + describe "using seq1 & seq2" do + let(:s1) { seq1 } + let(:s2) { seq2 } + let(:result) { correct_forward_sdiff } + + it_has_behavior "compare sequences correctly" + end + + describe "using %w(abc def yyy xxx ghi jkl) & %w(abc dxf xxx ghi jkl)" do + let(:s1) { %w[abc def yyy xxx ghi jkl] } + let(:s2) { %w[abc dxf xxx ghi jkl] } + let(:result) { + # standard:disable Layout/ExtraSpacing + [ + ["=", [0, "abc"], [0, "abc"]], + ["!", [1, "def"], [1, "dxf"]], + ["-", [2, "yyy"], [2, nil]], + ["=", [3, "xxx"], [2, "xxx"]], + ["=", [4, "ghi"], [3, "ghi"]], + ["=", [5, "jkl"], [4, "jkl"]] + ] + # standard:enable Layout/ExtraSpacing + } + + it_has_behavior "compare sequences correctly" + end + + describe "using %w(a b c d e) & %w(a e)" do + let(:s1) { %w[a b c d e] } + let(:s2) { %w[a e] } + let(:result) { + [ + ["=", [0, "a"], [0, "a"]], + ["-", [1, "b"], [1, nil]], + ["-", [2, "c"], [1, nil]], + ["-", [3, "d"], [1, nil]], + ["=", [4, "e"], [1, "e"]] + ] + } + + it_has_behavior "compare sequences correctly" + end + + describe "using %w(a e) & %w(a b c d e)" do + let(:s1) { %w[a e] } + let(:s2) { %w[a b c d e] } + let(:result) { + [ + ["=", [0, "a"], [0, "a"]], + ["+", [1, nil], [1, "b"]], + ["+", [1, nil], [2, "c"]], + ["+", [1, nil], [3, "d"]], + ["=", [1, "e"], [4, "e"]] + ] + } + + it_has_behavior "compare sequences correctly" + end + + describe "using %w(v x a e) & %w(w y a b c d e)" do + let(:s1) { %w[v x a e] } + let(:s2) { %w[w y a b c d e] } 
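+ # Each expected row below is [action, [old_position, old_element],
+ # [new_position, new_element]]; the context_diff helper in spec_helper.rb
+ # wraps these arrays into Diff::LCS::ContextChange instances before the
+ # comparison against Diff::LCS.sdiff.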
+ let(:result) { + [ + ["!", [0, "v"], [0, "w"]], + ["!", [1, "x"], [1, "y"]], + ["=", [2, "a"], [2, "a"]], + ["+", [3, nil], [3, "b"]], + ["+", [3, nil], [4, "c"]], + ["+", [3, nil], [5, "d"]], + ["=", [3, "e"], [6, "e"]] + ] + } + + it_has_behavior "compare sequences correctly" + end + + describe "using %w(x a e) & %w(a b c d e)" do + let(:s1) { %w[x a e] } + let(:s2) { %w[a b c d e] } + let(:result) { + [ + ["-", [0, "x"], [0, nil]], + ["=", [1, "a"], [0, "a"]], + ["+", [2, nil], [1, "b"]], + ["+", [2, nil], [2, "c"]], + ["+", [2, nil], [3, "d"]], + ["=", [2, "e"], [4, "e"]] + ] + } + + it_has_behavior "compare sequences correctly" + end + + describe "using %w(a e) & %w(x a b c d e)" do + let(:s1) { %w[a e] } + let(:s2) { %w[x a b c d e] } + let(:result) { + [ + ["+", [0, nil], [0, "x"]], + ["=", [0, "a"], [1, "a"]], + ["+", [1, nil], [2, "b"]], + ["+", [1, nil], [3, "c"]], + ["+", [1, nil], [4, "d"]], + ["=", [1, "e"], [5, "e"]] + ] + } + + it_has_behavior "compare sequences correctly" + end + + describe "using %w(a e v) & %w(x a b c d e w x)" do + let(:s1) { %w[a e v] } + let(:s2) { %w[x a b c d e w x] } + let(:result) { + [ + ["+", [0, nil], [0, "x"]], + ["=", [0, "a"], [1, "a"]], + ["+", [1, nil], [2, "b"]], + ["+", [1, nil], [3, "c"]], + ["+", [1, nil], [4, "d"]], + ["=", [1, "e"], [5, "e"]], + ["!", [2, "v"], [6, "w"]], + ["+", [3, nil], [7, "x"]] + ] + } + + it_has_behavior "compare sequences correctly" + end + + describe "using %w() & %w(a b c)" do + let(:s1) { %w[] } + let(:s2) { %w[a b c] } + let(:result) { + [ + ["+", [0, nil], [0, "a"]], + ["+", [0, nil], [1, "b"]], + ["+", [0, nil], [2, "c"]] + ] + } + + it_has_behavior "compare sequences correctly" + end + + describe "using %w(a b c) & %w(1)" do + let(:s1) { %w[a b c] } + let(:s2) { %w[1] } + let(:result) { + [ + ["!", [0, "a"], [0, "1"]], + ["-", [1, "b"], [1, nil]], + ["-", [2, "c"], [1, nil]] + ] + } + + it_has_behavior "compare sequences correctly" + end + + describe "using %w(a b c) & %w(c)" do + let(:s1) { %w[a b c] } + let(:s2) { %w[c] } + let(:result) { + [ + ["-", [0, "a"], [0, nil]], + ["-", [1, "b"], [0, nil]], + ["=", [2, "c"], [0, "c"]] + ] + } + + it_has_behavior "compare sequences correctly" + end + + describe "using %w(abcd efgh ijkl mnop) & []" do + let(:s1) { %w[abcd efgh ijkl mnop] } + let(:s2) { [] } + let(:result) { + [ + ["-", [0, "abcd"], [0, nil]], + ["-", [1, "efgh"], [0, nil]], + ["-", [2, "ijkl"], [0, nil]], + ["-", [3, "mnop"], [0, nil]] + ] + } + + it_has_behavior "compare sequences correctly" + end + + describe "using [[1,2]] & []" do + let(:s1) { [[1, 2]] } + let(:s2) { [] } + let(:result) { + [ + ["-", [0, [1, 2]], [0, nil]] + ] + } + + it_has_behavior "compare sequences correctly" + end +end diff --git a/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/spec_helper.rb b/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/spec_helper.rb new file mode 100644 index 000000000..baaa3d03c --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/spec_helper.rb @@ -0,0 +1,376 @@ +# frozen_string_literal: true + +require "rubygems" +require "pathname" + +require "psych" if RUBY_VERSION >= "1.9" + +if ENV["COVERAGE"] + require "simplecov" + require "simplecov-lcov" + + SimpleCov::Formatter::LcovFormatter.config do |config| + config.report_with_single_file = true + config.lcov_file_name = "lcov.info" + end + + SimpleCov.start "test_frameworks" do + enable_coverage :branch + primary_coverage :branch + formatter SimpleCov::Formatter::MultiFormatter.new([ + SimpleCov::Formatter::HTMLFormatter, 
+ SimpleCov::Formatter::LcovFormatter, + SimpleCov::Formatter::SimpleFormatter + ]) + end +end + +file = Pathname.new(__FILE__).expand_path +path = file.parent +parent = path.parent + +$:.unshift parent.join("lib") + +module CaptureSubprocessIO + def _synchronize + yield + end + + def capture_subprocess_io + _synchronize { _capture_subprocess_io { yield } } + end + + def _capture_subprocess_io + require "tempfile" + + captured_stdout, captured_stderr = Tempfile.new("out"), Tempfile.new("err") + + orig_stdout, orig_stderr = $stdout.dup, $stderr.dup + $stdout.reopen captured_stdout + $stderr.reopen captured_stderr + + yield + + $stdout.rewind + $stderr.rewind + + [captured_stdout.read, captured_stderr.read] + ensure + captured_stdout.unlink + captured_stderr.unlink + $stdout.reopen orig_stdout + $stderr.reopen orig_stderr + end + private :_capture_subprocess_io +end + +require "diff-lcs" + +module Diff::LCS::SpecHelper + def hello + "hello" + end + + def hello_ary + %w[h e l l o] + end + + def seq1 + %w[a b c e h j l m n p] + end + + def skipped_seq1 + %w[a h n p] + end + + def seq2 + %w[b c d e f j k l m r s t] + end + + def skipped_seq2 + %w[d f k r s t] + end + + def word_sequence + %w[abcd efgh ijkl mnopqrstuvwxyz] + end + + def correct_lcs + %w[b c e j l m] + end + + # standard:disable Layout/ExtraSpacing + def correct_forward_diff + [ + [ + ["-", 0, "a"] + ], + [ + ["+", 2, "d"] + ], + [ + ["-", 4, "h"], + ["+", 4, "f"] + ], + [ + ["+", 6, "k"] + ], + [ + ["-", 8, "n"], + ["+", 9, "r"], + ["-", 9, "p"], + ["+", 10, "s"], + ["+", 11, "t"] + ] + ] + end + + def correct_backward_diff + [ + [ + ["+", 0, "a"] + ], + [ + ["-", 2, "d"] + ], + [ + ["-", 4, "f"], + ["+", 4, "h"] + ], + [ + ["-", 6, "k"] + ], + [ + ["-", 9, "r"], + ["+", 8, "n"], + ["-", 10, "s"], + ["+", 9, "p"], + ["-", 11, "t"] + ] + ] + end + + def correct_forward_sdiff + [ + ["-", [0, "a"], [0, nil]], + ["=", [1, "b"], [0, "b"]], + ["=", [2, "c"], [1, "c"]], + ["+", [3, nil], [2, "d"]], + ["=", [3, "e"], [3, "e"]], + ["!", [4, "h"], [4, "f"]], + ["=", [5, "j"], [5, "j"]], + ["+", [6, nil], [6, "k"]], + ["=", [6, "l"], [7, "l"]], + ["=", [7, "m"], [8, "m"]], + ["!", [8, "n"], [9, "r"]], + ["!", [9, "p"], [10, "s"]], + ["+", [10, nil], [11, "t"]] + ] + end + # standard:enable Layout/ExtraSpacing + + def reverse_sdiff(forward_sdiff) + forward_sdiff.map { |line| + line[1], line[2] = line[2], line[1] + case line[0] + when "-" then line[0] = "+" + when "+" then line[0] = "-" + end + line + } + end + + def change_diff(diff) + map_diffs(diff, Diff::LCS::Change) + end + + def context_diff(diff) + map_diffs(diff, Diff::LCS::ContextChange) + end + + def format_diffs(diffs) + diffs.map { |e| + if e.is_a?(Array) + e.map { |f| f.to_a.join }.join(", ") + else + e.to_a.join + end + }.join("\n") + end + + def map_diffs(diffs, klass = Diff::LCS::ContextChange) + diffs.map do |chunks| + if klass == Diff::LCS::ContextChange + klass.from_a(chunks) + else + chunks.map { |changes| klass.from_a(changes) } + end + end + end + + def balanced_traversal(s1, s2, callback_type) + callback = __send__(callback_type) + Diff::LCS.traverse_balanced(s1, s2, callback) + callback + end + + def balanced_reverse(change_result) + new_result = [] + change_result.each do |line| + line = [line[0], line[2], line[1]] + case line[0] + when "<" + line[0] = ">" + when ">" + line[0] = "<" + end + new_result << line + end + new_result.sort_by { |line| [line[1], line[2]] } + end + + def map_to_no_change(change_result) + new_result = [] + change_result.each do |line| + case 
line[0] + when "!" + new_result << ["<", line[1], line[2]] + new_result << [">", line[1] + 1, line[2]] + else + new_result << line + end + end + new_result + end + + class SimpleCallback + def initialize + reset + end + + attr_reader :matched_a + attr_reader :matched_b + attr_reader :discards_a + attr_reader :discards_b + attr_reader :done_a + attr_reader :done_b + + def reset + @matched_a = [] + @matched_b = [] + @discards_a = [] + @discards_b = [] + @done_a = [] + @done_b = [] + self + end + + def match(event) + @matched_a << event.old_element + @matched_b << event.new_element + end + + def discard_b(event) + @discards_b << event.new_element + end + + def discard_a(event) + @discards_a << event.old_element + end + + def finished_a(event) + @done_a << [ + event.old_element, event.old_position, + event.new_element, event.new_position + ] + end + + def finished_b(event) + @done_b << [ + event.old_element, event.old_position, + event.new_element, event.new_position + ] + end + end + + def simple_callback + SimpleCallback.new + end + + class SimpleCallbackNoFinishers < SimpleCallback + undef :finished_a + undef :finished_b + end + + def simple_callback_no_finishers + SimpleCallbackNoFinishers.new + end + + class BalancedCallback + def initialize + reset + end + + attr_reader :result + + def reset + @result = [] + end + + def match(event) + @result << ["=", event.old_position, event.new_position] + end + + def discard_a(event) + @result << ["<", event.old_position, event.new_position] + end + + def discard_b(event) + @result << [">", event.old_position, event.new_position] + end + + def change(event) + @result << ["!", event.old_position, event.new_position] + end + end + + def balanced_callback + BalancedCallback.new + end + + class BalancedCallbackNoChange < BalancedCallback + undef :change + end + + def balanced_callback_no_change + BalancedCallbackNoChange.new + end + + module Matchers + extend RSpec::Matchers::DSL + + matcher :be_nil_or_match_values do |ii, s1, s2| + match do |ee| + expect(ee).to(satisfy { |vee| vee.nil? 
|| s1[ii] == s2[ee] }) + end + end + + matcher :correctly_map_sequence do |s1| + match do |actual| + actual.each_index { |ii| expect(actual[ii]).to be_nil_or_match_values(ii, s1, @s2) } + end + + chain :to_other_sequence do |s2| + @s2 = s2 + end + end + end +end + +RSpec.configure do |conf| + conf.include Diff::LCS::SpecHelper + conf.alias_it_should_behave_like_to :it_has_behavior, "has behavior:" + # standard:disable Style/HashSyntax + conf.filter_run_excluding :broken => true + # standard:enable Style/HashSyntax +end diff --git a/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/traverse_balanced_spec.rb b/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/traverse_balanced_spec.rb new file mode 100644 index 000000000..3a3f67797 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/traverse_balanced_spec.rb @@ -0,0 +1,312 @@ +# frozen_string_literal: true + +require "spec_helper" + +describe "Diff::LCS.traverse_balanced" do + include Diff::LCS::SpecHelper::Matchers + + shared_examples "with a #change callback" do |s1, s2, result| + it "traverses s1 -> s2 correctly" do + traversal = balanced_traversal(s1, s2, :balanced_callback) + expect(traversal.result).to eq(result) + end + + it "traverses s2 -> s1 correctly" do + traversal = balanced_traversal(s2, s1, :balanced_callback) + expect(traversal.result).to eq(balanced_reverse(result)) + end + end + + shared_examples "without a #change callback" do |s1, s2, result| + it "traverses s1 -> s2 correctly" do + traversal = balanced_traversal(s1, s2, :balanced_callback_no_change) + expect(traversal.result).to eq(map_to_no_change(result)) + end + + it "traverses s2 -> s1 correctly" do + traversal = balanced_traversal(s2, s1, :balanced_callback_no_change) + expect(traversal.result).to eq(map_to_no_change(balanced_reverse(result))) + end + end + + describe "identical string sequences ('abc')" do + s1 = s2 = "abc" + + result = [ + ["=", 0, 0], + ["=", 1, 1], + ["=", 2, 2] + ] + + it_has_behavior "with a #change callback", s1, s2, result + it_has_behavior "without a #change callback", s1, s2, result + end + + describe "identical array sequences %w(a b c)" do + s1 = s2 = %w[a b c] + + result = [ + ["=", 0, 0], + ["=", 1, 1], + ["=", 2, 2] + ] + + it_has_behavior "with a #change callback", s1, s2, result + it_has_behavior "without a #change callback", s1, s2, result + end + + describe "sequences %w(a b c) & %w(a x c)" do + s1 = %w[a b c] + s2 = %w[a x c] + + result = [ + ["=", 0, 0], + ["!", 1, 1], + ["=", 2, 2] + ] + + it_has_behavior "with a #change callback", s1, s2, result + it_has_behavior "without a #change callback", s1, s2, result + end + + describe "sequences %w(a x y c) & %w(a v w c)" do + s1 = %w[a x y c] + s2 = %w[a v w c] + + result = [ + ["=", 0, 0], + ["!", 1, 1], + ["!", 2, 2], + ["=", 3, 3] + ] + + it_has_behavior "with a #change callback", s1, s2, result + it_has_behavior "without a #change callback", s1, s2, result + end + + describe "sequences %w(x y c) & %w(v w c)" do + s1 = %w[x y c] + s2 = %w[v w c] + result = [ + ["!", 0, 0], + ["!", 1, 1], + ["=", 2, 2] + ] + + it_has_behavior "with a #change callback", s1, s2, result + it_has_behavior "without a #change callback", s1, s2, result + end + + describe "sequences %w(a x y z) & %w(b v w)" do + s1 = %w[a x y z] + s2 = %w[b v w] + result = [ + ["!", 0, 0], + ["!", 1, 1], + ["!", 2, 2], + ["<", 3, 3] + ] + + it_has_behavior "with a #change callback", s1, s2, result + it_has_behavior "without a #change callback", s1, s2, result + end + + describe "sequences %w(a z) & %w(a)" 
do + s1 = %w[a z] + s2 = %w[a] + result = [ + ["=", 0, 0], + ["<", 1, 1] + ] + + it_has_behavior "with a #change callback", s1, s2, result + it_has_behavior "without a #change callback", s1, s2, result + end + + describe "sequences %w(z a) & %w(a)" do + s1 = %w[z a] + s2 = %w[a] + result = [ + ["<", 0, 0], + ["=", 1, 0] + ] + + it_has_behavior "with a #change callback", s1, s2, result + it_has_behavior "without a #change callback", s1, s2, result + end + + describe "sequences %w(a b c) & %w(x y z)" do + s1 = %w[a b c] + s2 = %w[x y z] + result = [ + ["!", 0, 0], + ["!", 1, 1], + ["!", 2, 2] + ] + + it_has_behavior "with a #change callback", s1, s2, result + it_has_behavior "without a #change callback", s1, s2, result + end + + describe "sequences %w(abcd efgh ijkl mnoopqrstuvwxyz) & []" do + s1 = %w[abcd efgh ijkl mnopqrstuvwxyz] + s2 = [] + result = [ + ["<", 0, 0], + ["<", 1, 0], + ["<", 2, 0], + ["<", 3, 0] + ] + + it_has_behavior "with a #change callback", s1, s2, result + it_has_behavior "without a #change callback", s1, s2, result + end + + describe "strings %q(a b c) & %q(a x c)" do + s1 = "a b c" + s2 = "a x c" + + result = [ + ["=", 0, 0], + ["=", 1, 1], + ["!", 2, 2], + ["=", 3, 3], + ["=", 4, 4] + ] + + it_has_behavior "with a #change callback", s1, s2, result + it_has_behavior "without a #change callback", s1, s2, result + end + + describe "strings %q(a x y c) & %q(a v w c)" do + s1 = "a x y c" + s2 = "a v w c" + + result = [ + ["=", 0, 0], + ["=", 1, 1], + ["!", 2, 2], + ["=", 3, 3], + ["!", 4, 4], + ["=", 5, 5], + ["=", 6, 6] + ] + + it_has_behavior "with a #change callback", s1, s2, result + it_has_behavior "without a #change callback", s1, s2, result + end + + describe "strings %q(x y c) & %q(v w c)" do + s1 = "x y c" + s2 = "v w c" + result = [ + ["!", 0, 0], + ["=", 1, 1], + ["!", 2, 2], + ["=", 3, 3], + ["=", 4, 4] + ] + + it_has_behavior "with a #change callback", s1, s2, result + it_has_behavior "without a #change callback", s1, s2, result + end + + describe "strings %q(a x y z) & %q(b v w)" do + s1 = "a x y z" + s2 = "b v w" + result = [ + ["!", 0, 0], + ["=", 1, 1], + ["!", 2, 2], + ["=", 3, 3], + ["!", 4, 4], + ["<", 5, 5], + ["<", 6, 5] + ] + + it_has_behavior "with a #change callback", s1, s2, result + it_has_behavior "without a #change callback", s1, s2, result + end + + describe "strings %q(a z) & %q(a)" do + s1 = "a z" + s2 = "a" + result = [ + ["=", 0, 0], + ["<", 1, 1], + ["<", 2, 1] + ] + + it_has_behavior "with a #change callback", s1, s2, result + it_has_behavior "without a #change callback", s1, s2, result + end + + describe "strings %q(z a) & %q(a)" do + s1 = "z a" + s2 = "a" + result = [ + ["<", 0, 0], + ["<", 1, 0], + ["=", 2, 0] + ] + + it_has_behavior "with a #change callback", s1, s2, result + it_has_behavior "without a #change callback", s1, s2, result + end + + describe "strings %q(a b c) & %q(x y z)" do + s1 = "a b c" + s2 = "x y z" + result = [ + ["!", 0, 0], + ["=", 1, 1], + ["!", 2, 2], + ["=", 3, 3], + ["!", 4, 4] + ] + + it_has_behavior "with a #change callback", s1, s2, result + it_has_behavior "without a #change callback", s1, s2, result + end + + describe "strings %q(abcd efgh ijkl mnopqrstuvwxyz) & %q()" do + s1 = "abcd efgh ijkl mnopqrstuvwxyz" + s2 = "" + # standard:disable Layout/ExtraSpacing + result = [ + ["<", 0, 0], + ["<", 1, 0], + ["<", 2, 0], + ["<", 3, 0], + ["<", 4, 0], + ["<", 5, 0], + ["<", 6, 0], + ["<", 7, 0], + ["<", 8, 0], + ["<", 9, 0], + ["<", 10, 0], + ["<", 11, 0], + ["<", 12, 0], + ["<", 13, 0], + ["<", 14, 0], + 
["<", 15, 0], + ["<", 16, 0], + ["<", 17, 0], + ["<", 18, 0], + ["<", 19, 0], + ["<", 20, 0], + ["<", 21, 0], + ["<", 22, 0], + ["<", 23, 0], + ["<", 24, 0], + ["<", 25, 0], + ["<", 26, 0], + ["<", 27, 0], + ["<", 28, 0] + ] + # standard:enable Layout/ExtraSpacing + + it_has_behavior "with a #change callback", s1, s2, result + it_has_behavior "without a #change callback", s1, s2, result + end +end diff --git a/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/traverse_sequences_spec.rb b/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/traverse_sequences_spec.rb new file mode 100644 index 000000000..8e9928f17 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/diff-lcs-1.6.2/spec/traverse_sequences_spec.rb @@ -0,0 +1,137 @@ +# frozen_string_literal: true + +require "spec_helper" + +describe "Diff::LCS.traverse_sequences" do + describe "callback with no finishers" do + describe "over (seq1, seq2)" do + before(:each) do + @callback_s1_s2 = simple_callback_no_finishers + Diff::LCS.traverse_sequences(seq1, seq2, @callback_s1_s2) + + @callback_s2_s1 = simple_callback_no_finishers + Diff::LCS.traverse_sequences(seq2, seq1, @callback_s2_s1) + end + + it "has the correct LCS result on left-matches" do + expect(@callback_s1_s2.matched_a).to eq(correct_lcs) + expect(@callback_s2_s1.matched_a).to eq(correct_lcs) + end + + it "has the correct LCS result on right-matches" do + expect(@callback_s1_s2.matched_b).to eq(correct_lcs) + expect(@callback_s2_s1.matched_b).to eq(correct_lcs) + end + + it "has the correct skipped sequences with the left sequence" do + expect(@callback_s1_s2.discards_a).to eq(skipped_seq1) + expect(@callback_s2_s1.discards_a).to eq(skipped_seq2) + end + + it "has the correct skipped sequences with the right sequence" do + expect(@callback_s1_s2.discards_b).to eq(skipped_seq2) + expect(@callback_s2_s1.discards_b).to eq(skipped_seq1) + end + + it "does not have anything done markers from the left or right sequences" do + expect(@callback_s1_s2.done_a).to be_empty + expect(@callback_s1_s2.done_b).to be_empty + expect(@callback_s2_s1.done_a).to be_empty + expect(@callback_s2_s1.done_b).to be_empty + end + end + + describe "over (hello, hello)" do + before(:each) do + @callback = simple_callback_no_finishers + Diff::LCS.traverse_sequences(hello, hello, @callback) + end + + it "has the correct LCS result on left-matches" do + expect(@callback.matched_a).to eq(hello.chars) + end + + it "has the correct LCS result on right-matches" do + expect(@callback.matched_b).to eq(hello.chars) + end + + it "has the correct skipped sequences with the left sequence" do + expect(@callback.discards_a).to be_empty + end + + it "has the correct skipped sequences with the right sequence" do + expect(@callback.discards_b).to be_empty + end + + it "does not have anything done markers from the left or right sequences" do + expect(@callback.done_a).to be_empty + expect(@callback.done_b).to be_empty + end + end + + describe "over (hello_ary, hello_ary)" do + before(:each) do + @callback = simple_callback_no_finishers + Diff::LCS.traverse_sequences(hello_ary, hello_ary, @callback) + end + + it "has the correct LCS result on left-matches" do + expect(@callback.matched_a).to eq(hello_ary) + end + + it "has the correct LCS result on right-matches" do + expect(@callback.matched_b).to eq(hello_ary) + end + + it "has the correct skipped sequences with the left sequence" do + expect(@callback.discards_a).to be_empty + end + + it "has the correct skipped sequences with the right sequence" do + 
expect(@callback.discards_b).to be_empty + end + + it "does not have anything done markers from the left or right sequences" do + expect(@callback.done_a).to be_empty + expect(@callback.done_b).to be_empty + end + end + end + + describe "callback with finisher" do + before(:each) do + @callback_s1_s2 = simple_callback + Diff::LCS.traverse_sequences(seq1, seq2, @callback_s1_s2) + @callback_s2_s1 = simple_callback + Diff::LCS.traverse_sequences(seq2, seq1, @callback_s2_s1) + end + + it "has the correct LCS result on left-matches" do + expect(@callback_s1_s2.matched_a).to eq(correct_lcs) + expect(@callback_s2_s1.matched_a).to eq(correct_lcs) + end + + it "has the correct LCS result on right-matches" do + expect(@callback_s1_s2.matched_b).to eq(correct_lcs) + expect(@callback_s2_s1.matched_b).to eq(correct_lcs) + end + + it "has the correct skipped sequences for the left sequence" do + expect(@callback_s1_s2.discards_a).to eq(skipped_seq1) + expect(@callback_s2_s1.discards_a).to eq(skipped_seq2) + end + + it "has the correct skipped sequences for the right sequence" do + expect(@callback_s1_s2.discards_b).to eq(skipped_seq2) + expect(@callback_s2_s1.discards_b).to eq(skipped_seq1) + end + + it "has done markers differently-sized sequences" do + expect(@callback_s1_s2.done_a).to eq([["p", 9, "t", 11]]) + expect(@callback_s1_s2.done_b).to be_empty + + expect(@callback_s2_s1.done_a).to be_empty + expect(@callback_s2_s1.done_b).to eq([["t", 11, "p", 9]]) + end + end +end diff --git a/vendor/bundle/ruby/3.2.0/gems/docile-1.4.1/.github/dependabot.yml b/vendor/bundle/ruby/3.2.0/gems/docile-1.4.1/.github/dependabot.yml new file mode 100644 index 000000000..2fd177121 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/docile-1.4.1/.github/dependabot.yml @@ -0,0 +1,15 @@ +version: 2 + +updates: + + # Maintain dependencies for GitHub Actions + - package-ecosystem: "github-actions" + directory: "/" + schedule: + interval: "daily" + + # Maintain dependencies for Ruby's Bundler + - package-ecosystem: "bundler" + directory: "/" + schedule: + interval: "daily" diff --git a/vendor/bundle/ruby/3.2.0/gems/docile-1.4.1/.github/workflows/main.yml b/vendor/bundle/ruby/3.2.0/gems/docile-1.4.1/.github/workflows/main.yml new file mode 100644 index 000000000..7620e33a8 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/docile-1.4.1/.github/workflows/main.yml @@ -0,0 +1,42 @@ +name: Main +on: + push: + branches: + - main + pull_request: + branches: + - main +jobs: + test: + name: 'CI Tests' + strategy: + fail-fast: false + matrix: + os: [ubuntu-latest] + # Due to https://github.com/actions/runner/issues/849, we have to use quotes for '3.0' + ruby: [jruby, 2.7, '3.0', 3.1, 3.2, 3.3, head] + runs-on: ${{ matrix.os }} + steps: + - uses: actions/checkout@v3.3.0 + + # Conditionally configure bundler via environment variables as advised + # * https://github.com/ruby/setup-ruby#bundle-config + - name: Set bundler environment variables + run: | + echo "BUNDLE_WITHOUT=checks" >> $GITHUB_ENV + if: matrix.ruby != 3.3 + + - uses: ruby/setup-ruby@v1 + with: + ruby-version: ${{ matrix.ruby }} + bundler-cache: true + + - run: bundle exec rspec + + - uses: codecov/codecov-action@v3.1.1 + with: + name: ${{ matrix.ruby }} + file: ./coverage/coverage.xml + + - run: bundle exec rubocop + if: matrix.ruby == 3.3 diff --git a/vendor/bundle/ruby/3.2.0/gems/docile-1.4.1/.gitignore b/vendor/bundle/ruby/3.2.0/gems/docile-1.4.1/.gitignore new file mode 100644 index 000000000..b1261eeb8 --- /dev/null +++ 
b/vendor/bundle/ruby/3.2.0/gems/docile-1.4.1/.gitignore @@ -0,0 +1,11 @@ +*.gem +.bundle +Gemfile.lock +pkg +.idea +doc +.yardoc +coverage +vendor +.ruby-gemset +.ruby-version diff --git a/vendor/bundle/ruby/3.2.0/gems/docile-1.4.1/.rspec b/vendor/bundle/ruby/3.2.0/gems/docile-1.4.1/.rspec new file mode 100644 index 000000000..b3eb8b494 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/docile-1.4.1/.rspec @@ -0,0 +1,2 @@ +--color +--format documentation \ No newline at end of file diff --git a/vendor/bundle/ruby/3.2.0/gems/docile-1.4.1/.rubocop.yml b/vendor/bundle/ruby/3.2.0/gems/docile-1.4.1/.rubocop.yml new file mode 100644 index 000000000..8bdb76f08 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/docile-1.4.1/.rubocop.yml @@ -0,0 +1,5 @@ +inherit_gem: + panolint: panolint-rubocop.yml + +require: + - rubocop-rake \ No newline at end of file diff --git a/vendor/bundle/ruby/3.2.0/gems/docile-1.4.1/.yardopts b/vendor/bundle/ruby/3.2.0/gems/docile-1.4.1/.yardopts new file mode 100644 index 000000000..735a41e91 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/docile-1.4.1/.yardopts @@ -0,0 +1,7 @@ +--title 'Docile Documentation' + +--no-private +--main=README.md + +--markup-provider=redcarpet +--markup=markdown diff --git a/vendor/bundle/ruby/3.2.0/gems/docile-1.4.1/Gemfile b/vendor/bundle/ruby/3.2.0/gems/docile-1.4.1/Gemfile new file mode 100644 index 000000000..43b9e2245 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/docile-1.4.1/Gemfile @@ -0,0 +1,26 @@ +# frozen_string_literal: true + +source "https://rubygems.org" + +# Specify gem's runtime dependencies in docile.gemspec +gemspec + +group :test do + gem "rspec", "~> 3.10" + gem "simplecov", require: false + + # CI-only test dependencies go here + if ENV.fetch("CI", nil) == "true" + gem "simplecov-cobertura", require: false, group: "test" + end +end + +# Excluded from CI except on latest MRI Ruby, to reduce compatibility burden +group :checks do + gem "panolint", github: "panorama-ed/panolint", branch: "main" +end + +# Optional, only used locally to release to rubygems.org +group :release, optional: true do + gem "rake" +end diff --git a/vendor/bundle/ruby/3.2.0/gems/docile-1.4.1/HISTORY.md b/vendor/bundle/ruby/3.2.0/gems/docile-1.4.1/HISTORY.md new file mode 100644 index 000000000..ef6781c8a --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/docile-1.4.1/HISTORY.md @@ -0,0 +1,134 @@ +# HISTORY + +## [Unreleased changes](http://github.com/ms-ati/docile/compare/v1.4.1...main) + +## [v1.4.1 (May 12, 2021)](http://github.com/ms-ati/docile/compare/v1.4.0...v1.4.1) + + - Special thanks to Mamoru TASAKA (@mtasaka): + - Starting point for a fix on the tests to pass on Ruby 3.3 + - Added support for Ruby 3.2 and 3.3 + - Removed support for Rubies below 2.7 + +## [v1.4.0 (May 12, 2021)](http://github.com/ms-ati/docile/compare/v1.3.5...v1.4.0) + + - Special thanks to Matt Schreiber (@tomeon): + - Short-circuit to calling #instance_exec directly on the DSL object (prior to + constructing a proxy object) when the DSL object and block context object are + identical (*Sorry it took over a year to review and merge this!*) + - Renamed default branch from master to main, see: https://github.com/github/renaming + - Temporarily removed YARD doc configuration, to replace after + migration to Github Actions + - Removed support for all EOL Rubies < 2.5 + - Migrated CI from Travis to Github Actions + - Special thanks (again!) 
to Taichi Ishitani (@taichi-ishitani): + - Use more reliable codecov github action (via simplecov-cobertura) + rather than less reliable codecov gem + - Enable bundle caching in github action setup-ruby + - Added Rubocop, and configured it to run in CI + - Added Dependabot, and configured it to run daily + - Added SECURITY.md for vulnerability reporting policy + +## [v1.3.5 (Jan 13, 2021)](http://github.com/ms-ati/docile/compare/v1.3.4...v1.3.5) + + - Special thanks to Jochen Seeber (@jochenseeber): + - Fix remaining delegation on Ruby 2.7 (PR #62) + - Remove support for Ruby 1.8.7 and REE, because they + [are no longer runnable on Travis CI](https://travis-ci.community/t/ruby-1-8-7-and-ree-builds-broken-by-ssl-certificate-failure/10866) + - Announce that continued support for any EOL Ruby versions (that is, versions + prior to Ruby 2.5 as of Jan 13 2021) will be decided on **Feb 1, 2021** + based on comments to [issue #58](https://github.com/ms-ati/docile/issues/58) + +## [v1.3.4 (Dec 22, 2020)](http://github.com/ms-ati/docile/compare/v1.3.3...v1.3.4) + + - Special thanks to Benoit Daloze (@eregon): + - Fix delegation on Ruby 2.7 (issues #45 and #44, PR #52) + +## [v1.3.3 (Dec 18, 2020)](http://github.com/ms-ati/docile/compare/v1.3.2...v1.3.3) + + - Special thanks (again!) to Taichi Ishitani (@taichi-ishitani): + - Fix keyword arg warnings on Ruby 2.7 (issue #44, PR #45) + - Filter Docile's source files from backtrace (issue #35, PR #36) + +## [v1.3.2 (Jun 12, 2019)](http://github.com/ms-ati/docile/compare/v1.3.1...v1.3.2) + + - Special thanks (again!) to Taichi Ishitani (@taichi-ishitani): + - Fix for DSL object is replaced when #dsl_eval is nested (#33, PR #34) + +## [v1.3.1 (May 24, 2018)](http://github.com/ms-ati/docile/compare/v1.3.0...v1.3.1) + + - Special thanks to Taichi Ishitani (@taichi-ishitani): + - Fix for when DSL object is also the block's context (#30) + +## [v1.3.0 (Feb 7, 2018)](http://github.com/ms-ati/docile/compare/v1.2.0...v1.3.0) + + - Allow helper methods in block's context to call DSL methods + - Add SemVer release policy explicitly + - Standardize on double-quoted string literals + - Workaround some more Travis CI shenanigans + +## [v1.2.0 (Jan 11, 2018)](http://github.com/ms-ati/docile/compare/v1.1.5...v1.2.0) + + - Special thanks to Christina Koller (@cmkoller) + - add DSL evaluation returning *return value of the block* (see `.dsl_eval_with_block_return`) + - add an example to README + - keep travis builds passing on old ruby versions + +## [v1.1.5 (Jun 15, 2014)](http://github.com/ms-ati/docile/compare/v1.1.4...v1.1.5) + + - as much as possible, loosen version restrictions on development dependencies + - clarify gemspec settings as much as possible + - bump rspec dependency to 3.0.x + +## [v1.1.4 (Jun 11, 2014)](http://github.com/ms-ati/docile/compare/v1.1.3...v1.1.4) + + - Special thanks to Ken Dreyer (@ktdreyer): + - make simplecov/coveralls optional for running tests \[[33834852c7](https://github.com/ms-ati/docile/commit/33834852c7849912b97e109e8c5c193579cc5e98)\] + - update URL in gemspec \[[174e654a07](https://github.com/ms-ati/docile/commit/174e654a075c8350b3411b212cfb409bc605348a)\] + +## [v1.1.3 (Feb 4, 2014)](http://github.com/ms-ati/docile/compare/v1.1.2...v1.1.3) + + - Special thanks to Alexey Vasiliev (@le0pard): + - fix problem to catch NoMethodError from non receiver object + - upgrade rspec format to new "expect" syntax + +## [v1.1.2 (Jan 10, 2014)](http://github.com/ms-ati/docile/compare/v1.1.1...v1.1.2) + + - remove unnecessarily nested proxy 
objects (thanks @Ajedi32)! + - documentation updates and corrections + +## [v1.1.1 (Nov 26, 2013)](http://github.com/ms-ati/docile/compare/v1.1.0...v1.1.1) + + - documentation updates and corrections + - fix Rubinius build in Travis CI + +## [v1.1.0 (Jul 29, 2013)](http://github.com/ms-ati/docile/compare/v1.0.5...v1.1.0) + + - add functional-style DSL objects via `Docile#dsl_eval_immutable` + +## [v1.0.5 (Jul 28, 2013)](http://github.com/ms-ati/docile/compare/v1.0.4...v1.0.5) + + - achieve 100% yard docs coverage + - fix rendering of docs at http://rubydoc.info/gems/docile + +## [v1.0.4 (Jul 25, 2013)](http://github.com/ms-ati/docile/compare/v1.0.3...v1.0.4) + + - simplify and clarify code + - fix a minor bug where FallbackContextProxy#instance_variables would return + symbols rather than strings on Ruby 1.8.x + +## [v1.0.3 (Jul 6, 2013)](http://github.com/ms-ati/docile/compare/v1.0.2...v1.0.3) + + - instrument code coverage via SimpleCov and publish to Coveralls.io + +## [v1.0.2 (Apr 1, 2013)](http://github.com/ms-ati/docile/compare/v1.0.1...v1.0.2) + + - allow passing parameters to DSL blocks (thanks @dslh!) + +## [v1.0.1 (Nov 29, 2012)](http://github.com/ms-ati/docile/compare/v1.0.0...v1.0.1) + + - relaxed rspec and rake dependencies to allow newer versions + - fixes to documentation + +## [v1.0.0 (Oct 29, 2012)](http://github.com/ms-ati/docile/compare/1b225c8a27...v1.0.0) + + - Initial Feature Set diff --git a/vendor/bundle/ruby/3.2.0/gems/docile-1.4.1/LICENSE b/vendor/bundle/ruby/3.2.0/gems/docile-1.4.1/LICENSE new file mode 100644 index 000000000..b8967f95e --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/docile-1.4.1/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2012-2024 Marc Siegel + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. 
diff --git a/vendor/bundle/ruby/3.2.0/gems/docile-1.4.1/README.md b/vendor/bundle/ruby/3.2.0/gems/docile-1.4.1/README.md new file mode 100644 index 000000000..fe095a40f --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/docile-1.4.1/README.md @@ -0,0 +1,409 @@ +# Docile + +[![Gem Version](https://img.shields.io/gem/v/docile.svg)](https://rubygems.org/gems/docile) +[![Gem Downloads](https://img.shields.io/gem/dt/docile.svg)](https://rubygems.org/gems/docile) + +[![Join the chat at https://gitter.im/ms-ati/docile](https://badges.gitter.im/Join%20Chat.svg)](https://gitter.im/ms-ati/docile?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge) +[![Yard Docs](http://img.shields.io/badge/yard-docs-blue.svg)](http://rubydoc.info/github/ms-ati/docile) + +[![Build Status](https://github.com/ms-ati/docile/actions/workflows/main.yml/badge.svg)](https://github.com/ms-ati/docile/actions/workflows/main.yml) +[![Code Coverage](https://img.shields.io/codecov/c/github/ms-ati/docile.svg)](https://codecov.io/github/ms-ati/docile) +[![Maintainability](https://api.codeclimate.com/v1/badges/79ca631bc123f7b83b34/maintainability)](https://codeclimate.com/github/ms-ati/docile/maintainability) + +Ruby makes it possible to create very expressive **Domain Specific +Languages**, or **DSL**'s for short. However, it requires some deep knowledge and +somewhat hairy meta-programming to get the interface just right. + +"Docile" means *Ready to accept control or instruction; submissive* [[1]] + +Instead of each Ruby project reinventing this wheel, let's make our Ruby DSL +coding a bit more docile... + +[1]: http://www.google.com/search?q=docile+definition "Google" + +## Usage + +### Basic: Ruby [Array](http://ruby-doc.org/core-3.0.0/Array.html) as DSL + +Let's say that we want to make a DSL for modifying Array objects. +Wouldn't it be great if we could just treat the methods of Array as a DSL? + +```ruby +with_array([]) do + push 1 + push 2 + pop + push 3 +end +#=> [1, 3] +``` + +No problem, just define the method `with_array` like this: + +```ruby +def with_array(arr=[], &block) + Docile.dsl_eval(arr, &block) +end +``` + +Easy! + +### Next step: Allow helper methods to call DSL methods + +What if, in our use of the methods of Array as a DSL, we want to extract +helper methods which in turn call DSL methods? + +```ruby +def pop_sum_and_push(n) + sum = 0 + n.times { sum += pop } + push sum +end + +Docile.dsl_eval([]) do + push 5 + push 6 + pop_sum_and_push(2) +end +#=> [11] +``` + +Without Docile, you may find this sort of code extraction to be more +challenging. + +### Wait! Can't I do that with just `instance_eval` or `instance_exec`? + +Good question! + +In short: **No**. + +Not if you want the code in the block to be able to refer to anything +the block would normally have access to from the surrounding context. + +Let's be very specific. Docile internally uses `instance_exec` (see [execution.rb#26](lib/docile/execution.rb#L26)), adding a small layer to support referencing *local variables*, *instance variables*, and *methods* from the _block's context_ **or** the target _object's context_, interchangeably. This is "**the hard part**", where most folks making a DSL in Ruby throw up their hands. 
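+
+As a minimal sketch of that fallback (reusing the `with_array` helper defined
+above, so the only assumption is that it wraps `Docile.dsl_eval`), a local
+variable and a helper method from the calling context stay visible inside the
+block alongside Array's own methods:
+
+```ruby
+def double(n)
+  n * 2
+end
+
+amount = 3
+
+with_array([]) do
+  push amount         # local variable from the surrounding context
+  push double(amount) # method from the surrounding context
+  pop                 # ordinary Array DSL method
+end
+#=> [3]
+```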
+ +For example: + +```ruby +class ContextOfBlock + def example_of_contexts + @block_instance_var = 1 + block_local_var = 2 + + with_array do + push @block_instance_var + push block_local_var + pop + push block_sees_this_method + end + end + + def block_sees_this_method + 3 + end + + def with_array(&block) + { + docile: Docile.dsl_eval([], &block), + instance_eval: ([].instance_eval(&block) rescue $!), + instance_exec: ([].instance_exec(&block) rescue $!) + } + end +end + +ContextOfBlock.new.example_of_contexts +#=> { + :docile=>[1, 3], + :instance_eval=>#, + :instance_exec=># + } +``` + +As you can see, it won't be possible to call methods or access instance variables defined in the block's context using just the raw `instance_eval` or `instance_exec` methods. And in fact, Docile goes further, making it easy to maintain this support even in multi-layered DSLs. + +### Build a Pizza + +Mutating (changing) an Array instance is fine, but what usually makes a good DSL is a [Builder Pattern][2]. + +For example, let's say you want a DSL to specify how you want to build a Pizza: + +```ruby +@sauce_level = :extra + +pizza do + cheese + pepperoni + sauce @sauce_level +end +#=> # +``` + +And let's say we have a PizzaBuilder, which builds a Pizza like this: + +```ruby +Pizza = Struct.new(:cheese, :pepperoni, :bacon, :sauce) + +class PizzaBuilder + def cheese(v=true); @cheese = v; self; end + def pepperoni(v=true); @pepperoni = v; self; end + def bacon(v=true); @bacon = v; self; end + def sauce(v=nil); @sauce = v; self; end + def build + Pizza.new(!!@cheese, !!@pepperoni, !!@bacon, @sauce) + end +end + +PizzaBuilder.new.cheese.pepperoni.sauce(:extra).build +#=> # +``` + +Then implement your DSL like this: + +```ruby +def pizza(&block) + Docile.dsl_eval(PizzaBuilder.new, &block).build +end +``` + +It's just that easy! + +[2]: http://stackoverflow.com/questions/328496/when-would-you-use-the-builder-pattern "Builder Pattern" + +### Multi-level and Recursive DSLs + +Docile is a very easy way to write a multi-level DSL in Ruby, even for +a [recursive data structure such as a tree][4]: + +```ruby +Person = Struct.new(:name, :mother, :father) + +person { + name 'John Smith' + mother { + name 'Mary Smith' + } + father { + name 'Tom Smith' + mother { + name 'Jane Smith' + } + } +} + +#=> #, +# father=#, +# father=nil>> +``` + +See the full [person tree example][4] for details. + +[4]: https://gist.github.com/ms-ati/2bb17bdf10a430faba98 + +### Block parameters + +Parameters can be passed to the DSL block. + +Supposing you want to make some sort of cheap [Sinatra][3] knockoff: + +```ruby +@last_request = nil +respond '/path' do |request| + puts "Request received: #{request}" + @last_request = request +end + +def ride bike + # Play with your new bike +end + +respond '/new_bike' do |bike| + ride(bike) +end +``` + +You'd put together a dispatcher something like this: + +```ruby +require 'singleton' + +class DispatchScope + def a_method_you_can_call_from_inside_the_block + :useful_huh? 
+ end +end + +class MessageDispatch + include Singleton + + def initialize + @responders = {} + end + + def add_responder path, &block + @responders[path] = block + end + + def dispatch path, request + Docile.dsl_eval(DispatchScope.new, request, &@responders[path]) + end +end + +def respond path, &handler + MessageDispatch.instance.add_responder path, handler +end + +def send_request path, request + MessageDispatch.instance.dispatch path, request +end +``` + +[3]: http://www.sinatrarb.com "Sinatra" + +### Functional-Style Immutable DSL Objects + +Sometimes, you want to use an object as a DSL, but it doesn't quite fit the +[imperative](http://en.wikipedia.org/wiki/Imperative_programming) pattern shown +above. + +Instead of methods like +[Array#push](http://www.ruby-doc.org/core-3.0.0/Array.html#method-i-push), which +modifies the object at hand, it has methods like +[String#reverse](http://www.ruby-doc.org/core-3.0.0/String.html#method-i-reverse), +which returns a new object without touching the original. Perhaps it's even +[frozen](http://www.ruby-doc.org/core-3.0.0/Object.html#method-i-freeze) in +order to enforce [immutability](http://en.wikipedia.org/wiki/Immutable_object). + +Wouldn't it be great if we could just treat these methods as a DSL as well? + +```ruby +s = "I'm immutable!".freeze + +with_immutable_string(s) do + reverse + upcase +end +#=> "!ELBATUMMI M'I" + +s +#=> "I'm immutable!" +``` + +No problem, just define the method `with_immutable_string` like this: + +```ruby +def with_immutable_string(str="", &block) + Docile.dsl_eval_immutable(str, &block) +end +``` + +All set! + +### Accessing the block's return value + +Sometimes you might want to access the return value of your provided block, +as opposed to the DSL object itself. In these cases, use +`dsl_eval_with_block_return`. It behaves exactly like `dsl_eval`, but returns +the output from executing the block, rather than the DSL object. + +```ruby +arr = [] +with_array(arr) do + push "a" + push "b" + push "c" + length +end +#=> 3 + +arr +#=> ["a", "b", "c"] +``` + +```ruby +def with_array(arr=[], &block) + Docile.dsl_eval_with_block_return(arr, &block) +end +``` + +## Features + + 1. Method lookup falls back from the DSL object to the block's context + 2. Local variable lookup falls back from the DSL object to the block's + context + 3. Instance variables are from the block's context only + 4. Nested DSL evaluation, correctly chaining method and variable handling + from the inner to the outer DSL scopes + 5. Alternatives for both imperative and functional styles of DSL objects + +## Installation + +``` bash +$ gem install docile +``` + +## Links +* [Source](https://github.com/ms-ati/docile) +* [Documentation](http://rubydoc.info/gems/docile) +* [Bug Tracker](https://github.com/ms-ati/docile/issues) + +## Status + +Works on [all currently supported ruby versions](https://github.com/ms-ati/docile/blob/master/.github/workflows/main.yml), +or so [Github Actions](https://github.com/ms-ati/docile/actions) +tells us. + +Used by some pretty cool gems to implement their DSLs, notably including +[SimpleCov](https://github.com/colszowka/simplecov). Keep an eye out for new +gems using Docile at the +[Ruby Toolbox](https://www.ruby-toolbox.com/projects/docile). + +## Release Policy + +Docile releases follow +[Semantic Versioning 2.0.0](https://semver.org/spec/v2.0.0.html). + +## Note on Patches/Pull Requests + + * Fork the project. 
+ * Setup your development environment with: + `gem install bundler; bundle install` + * Make your feature addition or bug fix. + * Add tests for it. This is important so I don't break it in a future version + unintentionally. + * Commit, do not mess with rakefile, version, or history. + (if you want to have your own version, that is fine but bump version in a + commit by itself I can ignore when I pull) + * Send me a pull request. Bonus points for topic branches. + +## Releasing + +To make a new release of `Docile` to +[RubyGems](https://rubygems.org/gems/docile), first install the release +dependencies (e.g. `rake`) as follows: + +```shell +bundle config set --local with 'release' +bundle install +``` + +Then carry out these steps: + +1. Update `HISTORY.md`: + - Add an entry for the upcoming version _x.y.z_ + - Move content from _Unreleased_ to the upcoming version _x.y.z_ + - Commit with title `Update HISTORY.md for x.y.z` + +2. Update `lib/docile/version.rb` + - Replace with upcoming version _x.y.z_ + - Commit with title `Bump version to x.y.z` + +3. `bundle exec rake release` + +## Copyright & License + +Copyright (c) 2012-2024 Marc Siegel. + +Licensed under the [MIT License](http://choosealicense.com/licenses/mit/), +see [LICENSE](LICENSE) for details. diff --git a/vendor/bundle/ruby/3.2.0/gems/docile-1.4.1/Rakefile b/vendor/bundle/ruby/3.2.0/gems/docile-1.4.1/Rakefile new file mode 100644 index 000000000..83e2b671a --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/docile-1.4.1/Rakefile @@ -0,0 +1,14 @@ +# frozen_string_literal: true + +require "rake/clean" +require "bundler/gem_tasks" +require "rspec/core/rake_task" + +# Default task for `rake` is to run rspec +task default: [:spec] + +# Use default rspec rake task +RSpec::Core::RakeTask.new + +# Configure `rake clobber` to delete all generated files +CLOBBER.include("pkg", "doc", "coverage") diff --git a/vendor/bundle/ruby/3.2.0/gems/docile-1.4.1/SECURITY.md b/vendor/bundle/ruby/3.2.0/gems/docile-1.4.1/SECURITY.md new file mode 100644 index 000000000..947561c00 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/docile-1.4.1/SECURITY.md @@ -0,0 +1,19 @@ +# Security Policy + +## Supported Versions + +Use this section to tell people about which versions of your project are +currently being supported with security updates. + +| Version | Supported | +| ------- | ------------------ | +| 1.4.x | :white_check_mark: | +| 1.3.x | :white_check_mark: | +| < 1.3 | :x: | + +## Reporting a Vulnerability + +At this time, security issues and vulnerabilities in Docile should +be reported like any other issue. Please create an issue in the +[public issue tracker](https://github.com/ms-ati/docile/issues) on +Github. diff --git a/vendor/bundle/ruby/3.2.0/gems/docile-1.4.1/docile.gemspec b/vendor/bundle/ruby/3.2.0/gems/docile-1.4.1/docile.gemspec new file mode 100644 index 000000000..22302652e --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/docile-1.4.1/docile.gemspec @@ -0,0 +1,36 @@ +# frozen_string_literal: true + +require_relative "lib/docile/version" + +Gem::Specification.new do |s| + s.name = "docile" + s.version = Docile::VERSION + s.author = "Marc Siegel" + s.email = "marc@usainnov.com" + s.homepage = "https://ms-ati.github.io/docile/" + s.summary = "Docile keeps your Ruby DSLs tame and well-behaved." + s.description = "Docile treats the methods of a given ruby object as a DSL "\ + "(domain specific language) within a given block. 
\n\n"\ + "Killer feature: you can also reference methods, instance "\ + "variables, and local variables from the original (non-DSL) "\ + "context within the block. \n\n"\ + "Docile releases follow Semantic Versioning as defined at "\ + "semver.org." + s.license = "MIT" + + # Specify oldest supported Ruby version (2.5 to support JRuby 9.2.17.0) + s.required_ruby_version = ">= 2.5.0" + + # Files included in the gem + s.files = `git ls-files -z`.split("\x0").reject do |f| + f.match(%r{^(test|spec|features)/}) + end + s.require_paths = ["lib"] + + s.metadata = { + "homepage_uri" => "https://ms-ati.github.io/docile/", + "changelog_uri" => "https://github.com/ms-ati/docile/blob/main/HISTORY.md", + "source_code_uri" => "https://github.com/ms-ati/docile", + "rubygems_mfa_required" => "true", + } +end diff --git a/vendor/bundle/ruby/3.2.0/gems/docile-1.4.1/lib/docile.rb b/vendor/bundle/ruby/3.2.0/gems/docile-1.4.1/lib/docile.rb new file mode 100644 index 000000000..90e6e8403 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/docile-1.4.1/lib/docile.rb @@ -0,0 +1,134 @@ +# frozen_string_literal: true + +require "docile/version" +require "docile/execution" +require "docile/fallback_context_proxy" +require "docile/chaining_fallback_context_proxy" +require "docile/backtrace_filter" + +# Docile keeps your Ruby DSLs tame and well-behaved. +module Docile + extend Execution + + # Execute a block in the context of an object whose methods represent the + # commands in a DSL. + # + # @note Use with an *imperative* DSL (commands modify the context object) + # + # Use this method to execute an *imperative* DSL, which means that: + # + # 1. Each command mutates the state of the DSL context object + # 2. The return value of each command is ignored + # 3. The final return value is the original context object + # + # @example Use a String as a DSL + # Docile.dsl_eval("Hello, world!") do + # reverse! + # upcase! + # end + # #=> "!DLROW ,OLLEH" + # + # @example Use an Array as a DSL + # Docile.dsl_eval([]) do + # push 1 + # push 2 + # pop + # push 3 + # end + # #=> [1, 3] + # + # @param dsl [Object] context object whose methods make up the DSL + # @param args [Array] arguments to be passed to the block + # @param block [Proc] the block of DSL commands to be executed against the + # `dsl` context object + # @return [Object] the `dsl` context object after executing the block + def dsl_eval(dsl, *args, &block) + exec_in_proxy_context(dsl, FallbackContextProxy, *args, &block) + dsl + end + + ruby2_keywords :dsl_eval if respond_to?(:ruby2_keywords, true) + module_function :dsl_eval + + # Execute a block in the context of an object whose methods represent the + # commands in a DSL, and return *the block's return value*. + # + # @note Use with an *imperative* DSL (commands modify the context object) + # + # Use this method to execute an *imperative* DSL, which means that: + # + # 1. Each command mutates the state of the DSL context object + # 2. The return value of each command is ignored + # 3. The final return value is the original context object + # + # @example Use a String as a DSL + # Docile.dsl_eval_with_block_return("Hello, world!") do + # reverse! + # upcase! + # first + # end + # #=> "!" 
+ # + # @example Use an Array as a DSL + # Docile.dsl_eval_with_block_return([]) do + # push "a" + # push "b" + # pop + # push "c" + # length + # end + # #=> 2 + # + # @param dsl [Object] context object whose methods make up the DSL + # @param args [Array] arguments to be passed to the block + # @param block [Proc] the block of DSL commands to be executed against the + # `dsl` context object + # @return [Object] the return value from executing the block + def dsl_eval_with_block_return(dsl, *args, &block) + exec_in_proxy_context(dsl, FallbackContextProxy, *args, &block) + end + + if respond_to?(:ruby2_keywords, true) + ruby2_keywords :dsl_eval_with_block_return + end + module_function :dsl_eval_with_block_return + + # Execute a block in the context of an immutable object whose methods, + # and the methods of their return values, represent the commands in a DSL. + # + # @note Use with a *functional* DSL (commands return successor + # context objects) + # + # Use this method to execute a *functional* DSL, which means that: + # + # 1. The original DSL context object is never mutated + # 2. Each command returns the next DSL context object + # 3. The final return value is the value returned by the last command + # + # @example Use a frozen String as a DSL + # Docile.dsl_eval_immutable("I'm immutable!".freeze) do + # reverse + # upcase + # end + # #=> "!ELBATUMMI M'I" + # + # @example Use a Float as a DSL + # Docile.dsl_eval_immutable(84.5) do + # fdiv(2) + # floor + # end + # #=> 42 + # + # @param dsl [Object] immutable context object whose methods make up the + # initial DSL + # @param args [Array] arguments to be passed to the block + # @param block [Proc] the block of DSL commands to be executed against the + # `dsl` context object and successor return values + # @return [Object] the return value of the final command in the block + def dsl_eval_immutable(dsl, *args, &block) + exec_in_proxy_context(dsl, ChainingFallbackContextProxy, *args, &block) + end + + ruby2_keywords :dsl_eval_immutable if respond_to?(:ruby2_keywords, true) + module_function :dsl_eval_immutable +end diff --git a/vendor/bundle/ruby/3.2.0/gems/docile-1.4.1/lib/docile/backtrace_filter.rb b/vendor/bundle/ruby/3.2.0/gems/docile-1.4.1/lib/docile/backtrace_filter.rb new file mode 100644 index 000000000..06a660587 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/docile-1.4.1/lib/docile/backtrace_filter.rb @@ -0,0 +1,24 @@ +# frozen_string_literal: true + +module Docile + # @api private + # + # This is used to remove entries pointing to Docile's source files + # from {Exception#backtrace} and {Exception#backtrace_locations}. + # + # If {NoMethodError} is caught then the exception object will be extended + # by this module to add filter functionalities. 
+ module BacktraceFilter + FILTER_PATTERN = %r{/lib/docile/}.freeze + + def backtrace + super.grep_v(FILTER_PATTERN) + end + + if ::Exception.public_method_defined?(:backtrace_locations) + def backtrace_locations + super.reject { |location| location.absolute_path =~ FILTER_PATTERN } + end + end + end +end diff --git a/vendor/bundle/ruby/3.2.0/gems/docile-1.4.1/lib/docile/chaining_fallback_context_proxy.rb b/vendor/bundle/ruby/3.2.0/gems/docile-1.4.1/lib/docile/chaining_fallback_context_proxy.rb new file mode 100644 index 000000000..3bc52e61b --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/docile-1.4.1/lib/docile/chaining_fallback_context_proxy.rb @@ -0,0 +1,27 @@ +# frozen_string_literal: true + +require "docile/fallback_context_proxy" + +module Docile + # @api private + # + # Operates in the same manner as {FallbackContextProxy}, but replacing + # the primary `receiver` object with the result of each proxied method. + # + # This is useful for implementing DSL evaluation for immutable context + # objects. + # + # @see Docile.dsl_eval_immutable + # + # rubocop:disable Style/MissingRespondToMissing + class ChainingFallbackContextProxy < FallbackContextProxy + # Proxy methods as in {FallbackContextProxy#method_missing}, replacing + # `receiver` with the returned value. + def method_missing(method, *args, &block) + @__receiver__ = super + end + + ruby2_keywords :method_missing if respond_to?(:ruby2_keywords, true) + end + # rubocop:enable Style/MissingRespondToMissing +end diff --git a/vendor/bundle/ruby/3.2.0/gems/docile-1.4.1/lib/docile/execution.rb b/vendor/bundle/ruby/3.2.0/gems/docile-1.4.1/lib/docile/execution.rb new file mode 100644 index 000000000..e8a640847 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/docile-1.4.1/lib/docile/execution.rb @@ -0,0 +1,53 @@ +# frozen_string_literal: true + +module Docile + # @api private + # + # A namespace for functions relating to the execution of a block against a + # proxy object. + module Execution + # Execute a block in the context of an object whose methods represent the + # commands in a DSL, using a specific proxy class. + # + # @param dsl [Object] context object whose methods make up the + # (initial) DSL + # @param proxy_type [FallbackContextProxy, ChainingFallbackContextProxy] + # which class to instantiate as proxy context + # @param args [Array] arguments to be passed to the block + # @param block [Proc] the block of DSL commands to be executed + # @return [Object] the return value of the block + def exec_in_proxy_context(dsl, proxy_type, *args, &block) + block_context = eval("self", block.binding) # rubocop:disable Style/EvalWithLocation + + # Use #equal? to test strict object identity (assuming that this dictum + # from the Ruby docs holds: "[u]nlike ==, the equal? 
method should never + # be overridden by subclasses as it is used to determine object + # identity") + return dsl.instance_exec(*args, &block) if dsl.equal?(block_context) + + proxy_context = proxy_type.new(dsl, block_context) + begin + block_context.instance_variables.each do |ivar| + value_from_block = block_context.instance_variable_get(ivar) + proxy_context.instance_variable_set(ivar, value_from_block) + end + + proxy_context.instance_exec(*args, &block) + ensure + if block_context.respond_to?(:__docile_undo_fallback__) + block_context.send(:__docile_undo_fallback__) + end + + block_context.instance_variables.each do |ivar| + next unless proxy_context.instance_variables.include?(ivar) + + value_from_dsl_proxy = proxy_context.instance_variable_get(ivar) + block_context.instance_variable_set(ivar, value_from_dsl_proxy) + end + end + end + + ruby2_keywords :exec_in_proxy_context if respond_to?(:ruby2_keywords, true) + module_function :exec_in_proxy_context + end +end diff --git a/vendor/bundle/ruby/3.2.0/gems/docile-1.4.1/lib/docile/fallback_context_proxy.rb b/vendor/bundle/ruby/3.2.0/gems/docile-1.4.1/lib/docile/fallback_context_proxy.rb new file mode 100644 index 000000000..18a2e0ea7 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/docile-1.4.1/lib/docile/fallback_context_proxy.rb @@ -0,0 +1,107 @@ +# frozen_string_literal: true + +require "set" + +module Docile + # @api private + # + # A proxy object with a primary receiver as well as a secondary + # fallback receiver. + # + # Will attempt to forward all method calls first to the primary receiver, + # and then to the fallback receiver if the primary does not handle that + # method. + # + # This is useful for implementing DSL evaluation in the context of an object. + # + # @see Docile.dsl_eval + # + # rubocop:disable Style/MissingRespondToMissing + class FallbackContextProxy + # The set of methods which will **not** be proxied, but instead answered + # by this object directly. + NON_PROXIED_METHODS = Set[:__send__, :object_id, :__id__, :==, :equal?, + :!, :!=, :instance_exec, :instance_variables, + :instance_variable_get, :instance_variable_set, + :remove_instance_variable] + + # The set of methods which will **not** fallback from the block's context + # to the dsl object. + NON_FALLBACK_METHODS = Set[:class, :self, :respond_to?, :instance_of?] + + # The set of instance variables which are local to this object and hidden. + # All other instance variables will be copied in and out of this object + # from the scope in which this proxy was created. + NON_PROXIED_INSTANCE_VARIABLES = Set[:@__receiver__, :@__fallback__] + + # Undefine all instance methods except those in {NON_PROXIED_METHODS} + instance_methods.each do |method| + undef_method(method) unless NON_PROXIED_METHODS.include?(method.to_sym) + end + + # @param [Object] receiver the primary proxy target to which all methods + # initially will be forwarded + # @param [Object] fallback the fallback proxy target to which any methods + # not handled by `receiver` will be forwarded + def initialize(receiver, fallback) + @__receiver__ = receiver + @__fallback__ = fallback + + # Enables calling DSL methods from helper methods in the block's context + unless fallback.respond_to?(:method_missing) + # NOTE: We could switch to {#define_singleton_method} on current Rubies + singleton_class = (class << fallback; self; end) + + # instrument {#method_missing} on the block's context to fallback to + # the DSL object. 
This allows helper methods in the block's context to + # contain calls to methods on the DSL object. + singleton_class. + send(:define_method, :method_missing) do |method, *args, &block| + m = method.to_sym + if !NON_FALLBACK_METHODS.member?(m) && + !fallback.respond_to?(m) && + receiver.respond_to?(m) + receiver.__send__(method.to_sym, *args, &block) + else + super(method, *args, &block) + end + end + + if singleton_class.respond_to?(:ruby2_keywords, true) + singleton_class.send(:ruby2_keywords, :method_missing) + end + + # instrument a helper method to remove the above instrumentation + singleton_class. + send(:define_method, :__docile_undo_fallback__) do + singleton_class.send(:remove_method, :method_missing) + singleton_class.send(:remove_method, :__docile_undo_fallback__) + end + end + end + + # @return [Array] Instance variable names, excluding + # {NON_PROXIED_INSTANCE_VARIABLES} + def instance_variables + super.reject { |v| NON_PROXIED_INSTANCE_VARIABLES.include?(v) } + end + + # Proxy all methods, excluding {NON_PROXIED_METHODS}, first to `receiver` + # and then to `fallback` if not found. + def method_missing(method, *args, &block) + if @__receiver__.respond_to?(method.to_sym) + @__receiver__.__send__(method.to_sym, *args, &block) + else + begin + @__fallback__.__send__(method.to_sym, *args, &block) + rescue NoMethodError => e + e.extend(BacktraceFilter) + raise e + end + end + end + + ruby2_keywords :method_missing if respond_to?(:ruby2_keywords, true) + end + # rubocop:enable Style/MissingRespondToMissing +end diff --git a/vendor/bundle/ruby/3.2.0/gems/docile-1.4.1/lib/docile/version.rb b/vendor/bundle/ruby/3.2.0/gems/docile-1.4.1/lib/docile/version.rb new file mode 100644 index 000000000..ba8d178ab --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/docile-1.4.1/lib/docile/version.rb @@ -0,0 +1,6 @@ +# frozen_string_literal: true + +module Docile + # The current version of this library + VERSION = "1.4.1" +end diff --git a/vendor/bundle/ruby/3.2.0/gems/equivalent-xml-0.6.0/Gemfile b/vendor/bundle/ruby/3.2.0/gems/equivalent-xml-0.6.0/Gemfile new file mode 100644 index 000000000..857c670d3 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/equivalent-xml-0.6.0/Gemfile @@ -0,0 +1,3 @@ +source "http://rubygems.org" + +gemspec \ No newline at end of file diff --git a/vendor/bundle/ruby/3.2.0/gems/equivalent-xml-0.6.0/LICENSE.txt b/vendor/bundle/ruby/3.2.0/gems/equivalent-xml-0.6.0/LICENSE.txt new file mode 100644 index 000000000..c6ad1d311 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/equivalent-xml-0.6.0/LICENSE.txt @@ -0,0 +1,20 @@ +Copyright (c) 2011-14 Michael B. Klein + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +"Software"), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/vendor/bundle/ruby/3.2.0/gems/equivalent-xml-0.6.0/README.md b/vendor/bundle/ruby/3.2.0/gems/equivalent-xml-0.6.0/README.md new file mode 100644 index 000000000..825e39fee --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/equivalent-xml-0.6.0/README.md @@ -0,0 +1,145 @@ +# equivalent-xml + +## Description + +### Problem + +Testing XML output is difficult: + +* Comparing text output is brittle due to the vagaries of serialization. +* Attribute order doesn't matter. +* Element order matters sometimes, but not always. +* Text sometimes needs to be normalized, but CDATA doesn't. +* Nodes in the same namespace don't always use the same prefix +* Etc. + +### Solution + +EquivalentXml for Nokogiri + +[![Build Status](https://secure.travis-ci.org/mbklein/equivalent-xml.png)](http://travis-ci.org/mbklein/equivalent-xml) +[![Dependency Status](https://gemnasium.com/mbklein/equivalent-xml.png)](https://gemnasium.com/mbklein/equivalent-xml) + +### Use + EquivalentXml.equivalent?(node_1, node_2, opts = { :element_order => false, :normalize_whitespace => true }) { |n1, n2, result| ... } + +node_1 and node_2 can be any Nokogiri::XML::Node descendants (or any string +containing an XML document or document fragment). The most common use case is +to compare two Nokogiri::XML::Document instances. + +node_1 is equivalent to node_2 if and only if: + +* node_1 and node_2 are of the same class +* node_1 and node_2 are in the same namespace +* node_1 and node_2 have the same number of child nodes + (excluding ProcessingInstructions, Comments and empty Text nodes) +* For each child of node_1, there is exactly one equal child of node_2 +* If called with :element_order => true, equivalent child elements must be + in the same relative position in order to be considered equal + +If a block is given, the block will be called every time two nodes are compared. The parameters will be +the two nodes being compared as well as the result of the comparison. If the block explicitly returns +`true` or `false` (a real `TrueClass` or `FalseClass`, not just an expression that can be coerced to +true or false), the return value will override the result of the comparison. + +`Element` nodes are equivalent if they have the same name, and their +child nodesets are equal (as defined above) + +`Attribute` nodes are equivalent if their names and values match exactly + +`CDATA` nodes are equivalent if their text strings match exactly, +including leading, trailing, and internal whitespace + +Non-CDATA `CharacterData` nodes are equivalent if their text strings +match after stripping leading and trailing whitespace and collapsing +internal whitespace to a single space + +`Document` nodes are equivalent if their root nodes are equal + +`ProcessingInstruction` and `Comment` nodes are ignored + +#### Options + + :element_order => true + +Require elements to be in the same relative position in order to be +considered equivalent. + + :normalize_whitespace => false + +Don't normalize whitespace within text nodes; require text nodes to +match exactly. + + :ignore_content => ["Device > SerialNumber", "Device > ICCID"] + +A single CSS selector, or an array of CSS selectors, of nodes for which the content (text and child +nodes) should be ignored when comparing for equivalence. 
Defaults to `nil`. (Uses Nokogiri's +`Node#css(*rules)` to conduct the search.) A short sketch at the end of this README shows these options used in combination. + +### Using with RSpec + +EquivalentXml includes a custom matcher for RSpec (version >=1.2.4) that makes including XML +equivalencies in your spec tests a cinch! + +Add the two lines below to `spec_helper.rb`: + +```ruby +require 'rspec/matchers' # req by equivalent-xml custom matcher `be_equivalent_to` +require 'equivalent-xml' +``` + +Equivalency: + + expect(node_1).to be_equivalent_to(node_2) + expect(node_1).not_to be_equivalent_to(node_2) + +Chained modifiers: + + expect(node_1).to be_equivalent_to(node_2).respecting_element_order + expect(node_1).to be_equivalent_to(node_2).with_whitespace_intact + expect(node_1).to be_equivalent_to(node_2).respecting_element_order.with_whitespace_intact + expect(node_1).to be_equivalent_to(node_2).ignoring_content_of("SerialNumber") + +## Contributing to equivalent-xml + +* Check out the latest master to make sure the feature hasn't been implemented or the bug hasn't been fixed yet +* Check out the issue tracker to make sure someone hasn't already requested and/or contributed it +* Fork the project +* Start a feature/bugfix branch +* Commit and push until you are happy with your contribution +* Make sure to add tests for it. This is important so I don't break it in a future version unintentionally. +* Please try not to mess with the Rakefile, version, or history. If you want to have your own version, or it is + otherwise necessary, that is fine, but please isolate it to its own commit so I can cherry-pick around it. + +## History + +- 0.6.0 - Add ability to ignore specific attributes (from paclough); remove circular dependencies (nbibler); Simplify compatibility workaround for message methods (jirutka) +- 0.5.1 - Fix false negative when comparing a Nokogiri::XML::Node to a string (introduced in 0.5.0) +- 0.5.0 - Allow to compare XML-Fragments in Strings (contrib. by webmasters) +- 0.4.4 - Fix rspec 3 deprecation warnings while maintaining compatibility with rspec 1 & 2 (w/assist from barelyknown & DanielHeath) +- 0.4.3 - Updates for rspec 3 +- 0.4.2 - Move version back into gemspec for now +- 0.4.1 - Improved RSpec version checking (contrib. by elandesign) +- 0.4.0 - Added :ignore_attr_values option (contrib. by ivannovosad) +- 0.3.0 - Added :ignore_content option (contrib. by moklett) +- 0.2.9 - Fix for rspec-rails >= 2.7 (contrib. by jcoyne) +- 0.2.8 - Allow comparison against nodesets (contrib. by gkellogg) +- 0.2.7 - Auto-require RSpec matchers if RSpec is loaded +- 0.2.6 - Added documentation for RSpec matchers +- 0.2.5 - Added YARD documentation +- 0.2.4 - Fixed comparison of non-XML input +- 0.2.3 - Improved handling of non-XML input +- 0.2.2 - Dependency update +- 0.2.1 - Hotfix: Missing files in gemspec +- 0.2.0 - Custom rspec matchers +- 0.1.6 - Allow caller to override comparison result in block +- 0.1.5 - Allow XML documents to be passed as strings, re-instituting dependency on nokogiri +- 0.1.4 - Hotfix: Missing block parameter on compare_nodesets() +- 0.1.3 - Hotfix: Missing block parameter on compare_nodesets() +- 0.1.2 - Yield evaluated nodes and provisional result if block given +- 0.1.1 - Removed explicit runtime dependency on nokogiri +- 0.1.0 - Initial release + +## Copyright + +Copyright (c) 2011-14 Michael B. Klein. See LICENSE.txt for further details.
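The options above can also be combined in a single direct call to `EquivalentXml.equivalent?`. Here is a minimal sketch; the XML snippets, element names, and expected results are illustrative assumptions rather than examples taken from the gem itself:

```ruby
require 'nokogiri'
require 'equivalent-xml'

doc1 = Nokogiri::XML("<Device><Model>iPhone</Model><SerialNumber>1234</SerialNumber></Device>")
doc2 = Nokogiri::XML("<Device><SerialNumber>5678</SerialNumber><Model>iPhone</Model></Device>")

# Element order is ignored by default, and :ignore_content excludes the
# differing serial numbers from the comparison, so this returns true.
EquivalentXml.equivalent?(doc1, doc2, :ignore_content => "Device > SerialNumber")
#=> true

# Requiring the same element order makes the comparison fail, because Model
# and SerialNumber appear in different positions in the two documents.
EquivalentXml.equivalent?(doc1, doc2,
                          :element_order  => true,
                          :ignore_content => "Device > SerialNumber")
#=> false
```

The same options back the chained RSpec modifiers shown above (`respecting_element_order`, `with_whitespace_intact`, `ignoring_content_of`, and so on).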
diff --git a/vendor/bundle/ruby/3.2.0/gems/equivalent-xml-0.6.0/Rakefile b/vendor/bundle/ruby/3.2.0/gems/equivalent-xml-0.6.0/Rakefile new file mode 100644 index 000000000..54dac9dd6 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/equivalent-xml-0.6.0/Rakefile @@ -0,0 +1,27 @@ +require 'rubygems' +require 'bundler' +require "bundler/gem_tasks" +require 'rake/tasklib' +require 'rdoc/task' + +begin + Bundler.setup(:default, :development) +rescue Bundler::BundlerError => e + $stderr.puts e.message + $stderr.puts "Run `bundle install` to install missing gems" + exit e.status_code +end +require 'rake' + +require 'rspec/core/rake_task' +RSpec::Core::RakeTask.new +task :default => :spec + +Rake::RDocTask.new do |rdoc| + version = File.exist?('VERSION') ? File.read('VERSION') : "" + + rdoc.rdoc_dir = 'rdoc' + rdoc.title = "equivalent-xml #{version}" + rdoc.rdoc_files.include('README*') + rdoc.rdoc_files.include('lib/**/*.rb') +end diff --git a/vendor/bundle/ruby/3.2.0/gems/equivalent-xml-0.6.0/lib/equivalent-xml.rb b/vendor/bundle/ruby/3.2.0/gems/equivalent-xml-0.6.0/lib/equivalent-xml.rb new file mode 100644 index 000000000..b1688951e --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/equivalent-xml-0.6.0/lib/equivalent-xml.rb @@ -0,0 +1,204 @@ +require 'nokogiri' + +module EquivalentXml + + class << self + + DEFAULT_OPTS = { :ignore_attr_values => false, :element_order => false, :normalize_whitespace => true } + + # Determine if two XML documents or nodes are equivalent + # + # @param [Nokogiri::XML::Node, Nokogiri::XML::NodeSet] node_1 The first top-level XML node to compare + # @param [Nokogiri::XML::Node, Nokogiri::XML::NodeSet] node_2 The secton top-level XML node to compare + # @param [Hash] opts Options that determine how certain comparisons are evaluated + # @option opts [Boolean] :element_order (false) Child elements must occur in the same order to be considered equivalent + # @option opts [Boolean] :normalize_whitespace (true) Collapse whitespace within Text nodes before comparing + # @option opts [String, Array] :ignore_content (nil) CSS selector(s) of nodes for which the content (text and child nodes) should be ignored when comparing for equivalence + # @yield [n1,n2,result] The two nodes currently being evaluated, and whether they are considered equivalent. The block can return true or false to override the default evaluation + # @return [Boolean] true or false + def equivalent?(node_1, node_2, opts = {}, &block) + opts = DEFAULT_OPTS.merge(opts) + if [node_1, node_2].any? { |node| node.is_a?(Nokogiri::XML::NodeSet)} + self.compare_nodesets(as_nodeset(node_1, opts), as_nodeset(node_2, opts), opts, &block) + else + # Don't let one node to coerced to a DocumentFragment if the other one is a Document + node_2 = Nokogiri::XML(node_2) if node_1.is_a?(Nokogiri::XML::Document) and !node_2.is_a?(Nokogiri::XML::Node) + node_1 = Nokogiri::XML(node_1) if node_2.is_a?(Nokogiri::XML::Document) and !node_1.is_a?(Nokogiri::XML::Node) + self.compare_nodes(as_node(node_1), as_node(node_2), opts, &block) + end + end + + def compare_nodes(node_1, node_2, opts, &block) + result = nil + if [node_1, node_2].any? 
{ |node| not node.respond_to?(:node_type) } + result = node_1.to_s == node_2.to_s + elsif (node_1.class != node_2.class) or self.same_namespace?(node_1,node_2) == false + result = false + else + case node_1.node_type + when Nokogiri::XML::Node::DOCUMENT_NODE + result = self.compare_documents(node_1,node_2,opts,&block) + when Nokogiri::XML::Node::ELEMENT_NODE + result = self.compare_elements(node_1,node_2,opts,&block) + when Nokogiri::XML::Node::ATTRIBUTE_NODE + result = self.compare_attributes(node_1,node_2,opts,&block) + when Nokogiri::XML::Node::CDATA_SECTION_NODE + result = self.compare_cdata(node_1,node_2,opts,&block) + when Nokogiri::XML::Node::TEXT_NODE + result = self.compare_text(node_1,node_2,opts,&block) + else + result = self.compare_children(node_1,node_2,opts,&block) + end + end + if block_given? + block_result = yield(node_1, node_2, result) + if block_result.is_a?(TrueClass) or block_result.is_a?(FalseClass) + result = block_result + end + end + return result + end + + def compare_documents(node_1, node_2, opts, &block) + self.equivalent?(node_1.root,node_2.root,opts,&block) + end + + def compare_elements(node_1, node_2, opts, &block) + (node_1.name == node_2.name) && self.compare_children(node_1,node_2,opts,&block) + end + + def compare_attributes(node_1, node_2, opts, &block) + + attr_names_match = node_1.name == node_2.name + + ignore_attrs = opts[ :ignore_attr_values ] + + if ignore_attrs && (ignore_attrs.empty? || ignore_attrs.include?( node_1.name )) + attr_names_match + else + attr_names_match && (node_1.value == node_2.value) + end + end + + def compare_text(node_1, node_2, opts, &block) + if opts[:normalize_whitespace] + node_1.text.strip.gsub(/\s+/,' ') == node_2.text.strip.gsub(/\s+/,' ') + else + node_1.text == node_2.text + end + end + + def compare_cdata(node_1, node_2, opts, &block) + node_1.text == node_2.text + end + + def compare_children(node_1, node_2, opts, &block) + if ignore_content?(node_1, opts) + # Stop recursion and state a match on the children + result = true + else + nodeset_1 = as_nodeset(node_1.children, opts) + nodeset_2 = as_nodeset(node_2.children, opts) + result = self.compare_nodesets(nodeset_1,nodeset_2,opts,&block) + end + + if node_1.respond_to?(:attribute_nodes) + attributes_1 = node_1.attribute_nodes + attributes_2 = node_2.attribute_nodes + result = result && self.compare_nodesets(attributes_1,attributes_2,opts,&block) + end + result + end + + def compare_nodesets(nodeset_1, nodeset_2, opts, &block) + local_set_1 = nodeset_1.dup + local_set_2 = nodeset_2.dup + + if local_set_1.length != local_set_2.length + return false + end + + local_set_1.each do |search_node| + found_node = local_set_2.find { |test_node| self.equivalent?(search_node,test_node,opts,&block) } + + if found_node.nil? + return false + else + if search_node.is_a?(Nokogiri::XML::Element) and opts[:element_order] + if search_node.parent.elements.index(search_node) != found_node.parent.elements.index(found_node) + return false + end + end + local_set_2.delete(found_node) + end + end + return local_set_2.length == 0 + end + + # Determine if two nodes are in the same effective Namespace + # + # @param [Nokogiri::XML::Node OR String] node_1 The first node to test + # @param [Nokogiri::XML::Node OR String] node_2 The second node to test + def same_namespace?(node_1, node_2) + args = [node_1,node_2] + + # CharacterData nodes shouldn't have namespaces. But in Nokogiri, + # they do. And they're invisible. And they get corrupted easily. + # So let's wilfully ignore them. 
And while we're at it, let's + # ignore any class that doesn't know it has a namespace. + if args.all? { |node| not node.respond_to?(:namespace) } or + args.any? { |node| node.is_a?(Nokogiri::XML::CharacterData) } + return true + end + + href1 = node_1.namespace.nil? ? '' : node_1.namespace.href + href2 = node_2.namespace.nil? ? '' : node_2.namespace.href + return href1 == href2 + end + + private + def as_node(data) + if data.respond_to?(:node_type) + return data + else + result = Nokogiri::XML.fragment(data) + if result.respond_to?(:root) && result.root.nil? + return data + else + return result + end + end + end + + def as_nodeset(data, opts = {}) + ignore_proc = lambda do |child| + child.node_type == Nokogiri::XML::Node::COMMENT_NODE || + child.node_type == Nokogiri::XML::Node::PI_NODE || + (opts[:normalize_whitespace] && child.node_type == Nokogiri::XML::Node::TEXT_NODE && child.text.strip.empty?) + end + + if data.is_a?(Nokogiri::XML::NodeSet) + data.reject { |child| ignore_proc.call(child) } + else + result = Nokogiri::XML("#{data}") + result.root.nil? ? data : result.root.children.reject { |child| ignore_proc.call(child) } + end + end + + def ignore_content?(node, opts = {}) + ignore_list = Array(opts[:ignore_content]).flatten.compact + return false if ignore_list.empty? + + ignore_list.each do |selector| + return true if node.document.css(selector).include?(node) + end + + return false + end + end + +end + +if defined?(::RSpec::Matchers) or defined?(::Spec::Matchers) + require 'equivalent-xml/rspec_matchers' +end diff --git a/vendor/bundle/ruby/3.2.0/gems/equivalent-xml-0.6.0/lib/equivalent-xml/rspec_matchers.rb b/vendor/bundle/ruby/3.2.0/gems/equivalent-xml-0.6.0/lib/equivalent-xml/rspec_matchers.rb new file mode 100644 index 000000000..0954a5243 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/equivalent-xml-0.6.0/lib/equivalent-xml/rspec_matchers.rb @@ -0,0 +1,72 @@ +require 'equivalent-xml' unless defined?(::EquivalentXml) + +begin + require 'rspec/expectations' +rescue LoadError +end + +module EquivalentXml::RSpecMatchers + + if defined?(::RSpec::Matchers) + rspec_namespace = ::RSpec::Matchers + elsif defined?(::Spec::Matchers) + rspec_namespace = ::Spec::Matchers + else + raise NameError, "Cannot find Spec (rspec 1.x) or RSpec (rspec 2.x)" + end + + # Determine if the receiver is equivalent to the argument as defined + # in {file:README.rdoc README.rdoc} and {EquivalentXml.equivalent? EquivalentXml.equivalent?}. + # node.should be_equivalent_to(other_node) + # node.should_not be_equivalent_to(other_node) + # node.should be_equivalent_to(other_node).respecting_element_order + # node.should be_equivalent_to(other_node).with_whitespace_intact + # node.should be_equivalent_to(other_node).respecting_element_order.with_whitespace_intact + # node.should be_equivalent_to(other_node).ignoring_content_of("Device > SerialNumber") + def be_equivalent_to(expected) + # Placeholder method for documentation purposes; the actual + # method is defined using RSpec's matcher DSL. 
+ end + + rspec_namespace.define :be_equivalent_to do |expected| + opts = {} + match do |actual| + EquivalentXml.equivalent?(actual,expected,opts) + end + + chain :respecting_element_order do + opts[:element_order] = true + end + + chain :with_whitespace_intact do + opts[:normalize_whitespace] = false + end + + chain :ignoring_content_of do |paths| + opts[:ignore_content] = paths + end + + chain :ignoring_attr_values do |*attrs| + opts[:ignore_attr_values] = attrs + end + + should_message = lambda do |actual| + [ 'expected:', expected.to_s, 'got:', actual.to_s ].join("\n") + end + + should_not_message = lambda do |actual| + [ 'expected:', actual.to_s, 'not to be equivalent to:', expected.to_s ].join("\n") + end + + if respond_to?(:failure_message_when_negated) + failure_message(&should_message) + failure_message_when_negated(&should_not_message) + else + failure_message_for_should(&should_message) + failure_message_for_should_not(&should_not_message) + end + + diffable + end + +end diff --git a/vendor/bundle/ruby/3.2.0/gems/equivalent-xml-0.6.0/spec/equivalent-xml_spec.rb b/vendor/bundle/ruby/3.2.0/gems/equivalent-xml-0.6.0/spec/equivalent-xml_spec.rb new file mode 100644 index 000000000..63d268280 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/equivalent-xml-0.6.0/spec/equivalent-xml_spec.rb @@ -0,0 +1,255 @@ +if defined?(RUBY_ENGINE) and (RUBY_ENGINE == 'ruby') and (RUBY_VERSION >= '1.9') + require 'simplecov' + SimpleCov.start +end +$:.push(File.join(File.dirname(__FILE__),'..','lib')) +require 'nokogiri' +require 'rspec/matchers' +require 'equivalent-xml' + +describe EquivalentXml do + + it "should consider a document equivalent to itself" do + doc1 = Nokogiri::XML("foo bar bazthings") + expect(doc1).to be_equivalent_to(doc1) + end + + it "should compare non-XML content based on its string representation" do + expect(nil).to be_equivalent_to(nil) + expect('').to be_equivalent_to('') + expect('').to be_equivalent_to(nil) + expect('foo').to be_equivalent_to('foo') + expect('foo').not_to be_equivalent_to('bar') + doc1 = Nokogiri::XML("foo bar bazthings") + expect(doc1).not_to be_equivalent_to(nil) + end + + it "should ensure that attributes match" do + doc1 = Nokogiri::XML("foo bar bazthings") + doc2 = Nokogiri::XML("foo bar bazthings") + expect(doc1).not_to be_equivalent_to(doc2) + + doc1 = Nokogiri::XML("foo bar bazthings") + doc2 = Nokogiri::XML("foo bar bazthings") + expect(doc1).to be_equivalent_to(doc2) + end + + it "shouldn't care about attribute order" do + doc1 = Nokogiri::XML("foo bar bazthings") + doc2 = Nokogiri::XML("foo bar bazthings") + expect(doc1).to be_equivalent_to(doc2) + end + + it "shouldn't care about element order by default" do + doc1 = Nokogiri::XML("foo bar bazthings") + doc2 = Nokogiri::XML("thingsfoo bar baz") + expect(doc1).to be_equivalent_to(doc2) + end + + it "should care about element order if :element_order => true is specified" do + doc1 = Nokogiri::XML("foo bar bazthings") + doc2 = Nokogiri::XML("thingsfoo bar baz") + expect(doc1).not_to be_equivalent_to(doc2).respecting_element_order + end + + it "should ensure nodesets have the same number of elements" do + doc1 = Nokogiri::XML("foo bar bazthings") + doc2 = Nokogiri::XML("thingsfoo bar baz") + expect(doc1).not_to be_equivalent_to(doc2) + end + + it "should ensure namespaces match" do + doc1 = Nokogiri::XML("foo bar bazthings") + doc2 = Nokogiri::XML("foo bar bazthings") + expect(doc1).not_to be_equivalent_to(doc2) + end + + it "should compare namespaces based on URI, not on prefix" do + doc1 = 
Nokogiri::XML("foo bar bazthings") + doc2 = Nokogiri::XML("foo bar bazthings") + expect(doc1).to be_equivalent_to(doc2) + end + + it "should ignore declared but unused namespaces" do + doc1 = Nokogiri::XML("foo bar bazthings") + doc2 = Nokogiri::XML("foo bar bazthings") + expect(doc1).to be_equivalent_to(doc2) + end + + it "should normalize simple whitespace by default" do + doc1 = Nokogiri::XML("foo bar bazthings") + doc2 = Nokogiri::XML("foo bar bazthings") + expect(doc1).to be_equivalent_to(doc2) + end + + it "shouldn't normalize simple whitespace if :normalize_whitespace => false is specified" do + doc1 = Nokogiri::XML("foo bar bazthings") + doc2 = Nokogiri::XML("foo bar bazthings") + expect(doc1).not_to be_equivalent_to(doc2).with_whitespace_intact + end + + it "should normalize complex whitespace by default" do + doc1 = Nokogiri::XML("foo bar bazthings") + doc2 = Nokogiri::XML(%{ + things + + foo + bar baz + + }) + expect(doc1).to be_equivalent_to(doc2) + end + + it "shouldn't normalize complex whitespace if :normalize_whitespace => false is specified" do + doc1 = Nokogiri::XML("foo bar bazthings") + doc2 = Nokogiri::XML(%{ + things + + foo + bar baz + + }) + expect(doc1).not_to be_equivalent_to(doc2).with_whitespace_intact + end + + it "should ignore comment nodes" do + doc1 = Nokogiri::XML("foo bar bazthings") + doc2 = Nokogiri::XML(%{ + things + + + foo + bar baz + + }) + expect(doc1).to be_equivalent_to(doc2) + end + + it "should properly handle a mixture of text and element nodes" do + doc1 = Nokogiri::XML("This phrase has bold text in it.") + doc2 = Nokogiri::XML("This phrase in has bold text it.") + expect(doc1).not_to be_equivalent_to(doc2) + end + + it "should properly handle documents passed in as strings" do + doc1 = "foo bar bazthings" + doc2 = "foo bar bazthings" + expect(doc1).to be_equivalent_to(doc2) + + doc1 = "foo bar bazthings" + doc2 = "foo bar baz quuxthings" + expect(doc1).not_to be_equivalent_to(doc2) + end + + it "should compare nodesets" do + doc1 = Nokogiri::XML("foo bar bazthings") + expect(doc1.root.children).to be_equivalent_to(doc1.root.children) + end + + it "should compare nodeset with string" do + doc1 = Nokogiri::XML("foo bar bazthings") + expect(doc1.root.children).to be_equivalent_to("foo bar bazthings") + end + + context "with the :ignore_content_paths option set to a CSS selector" do + it "ignores the text content of a node that matches the given CSS selector when comparing with #equivalent?" 
do + doc1 = Nokogiri::XML("iPhone1234") + doc2 = Nokogiri::XML("iPhone5678") + + expect(EquivalentXml.equivalent?(doc1, doc2, :ignore_content => "SerialNumber")).to eq(true) + expect(EquivalentXml.equivalent?(doc1, doc2, :ignore_content => "Devices>Device>SerialNumber")).to eq(true) + + expect(doc1).to be_equivalent_to(doc2).ignoring_content_of("SerialNumber") + expect(doc1).to be_equivalent_to(doc2).ignoring_content_of("Devices>Device>SerialNumber") + end + + it "ignores the text content of a node that matches the given CSS selector when comparing with a matcher" do + doc1 = Nokogiri::XML("iPhone1234") + doc2 = Nokogiri::XML("iPhone5678") + + expect(doc1).to be_equivalent_to(doc2).ignoring_content_of("SerialNumber") + expect(doc1).to be_equivalent_to(doc2).ignoring_content_of("Devices>Device>SerialNumber") + end + + it "ignores all children of a node that matches the given selector when comparing for equivalence" do + doc1 = Nokogiri::XML("iPhone1234") + doc2 = Nokogiri::XML("iPad5678") + + expect(doc1).to be_equivalent_to(doc2).ignoring_content_of("Device") + end + + it "still considers the number of elements even if they match the given CSS selector" do + doc1 = Nokogiri::XML("iPhone1234") + doc2 = Nokogiri::XML("iPhone1234iPad5678") + + expect(doc1).not_to be_equivalent_to(doc2).ignoring_content_of("Device") + end + + it "still considers attributes on the matched path when comparing for equivalence" do + doc1 = Nokogiri::XML("iPhone1234") + doc2 = Nokogiri::XML("iPhone1234") + + expect(doc1).not_to be_equivalent_to(doc2).ignoring_content_of("Device") + end + + it "ignores all matches of the CSS selector" do + doc1 = Nokogiri::XML("iPhone1001iPad2001") + doc2 = Nokogiri::XML("iPhone1002iPad2002") + + expect(doc1).to be_equivalent_to(doc2).ignoring_content_of("SerialNumber") + end + + it "should properly compare a document to a string" do + doc1 = '' + doc2 = Nokogiri::XML doc1 + expect(doc1).to be_equivalent_to(doc2) + expect(doc2).to be_equivalent_to(doc1) + end + end + + context "with the :ignore_content_paths option set to an array of CSS selectors" do + it "ignores the content of all nodes that match any of the given CSS selectors when comparing for equivalence" do + doc1 = Nokogiri::XML("iPhone1234AAAA") + doc2 = Nokogiri::XML("iPhone5678BBBB") + + expect(doc1).to be_equivalent_to(doc2).ignoring_content_of(["SerialNumber", "ICCID"]) + end + end + + context "with :ignore_attr_values set to true" do + it "ignores the values of attributes when comparing for equivalence" do + doc1 = Nokogiri::XML("foo bar bazthings") + doc2 = Nokogiri::XML("foo bar bazthings") + expect(doc1).to be_equivalent_to(doc2).ignoring_attr_values + end + end + + context 'with :ignore_attr_values receiving specific attribute names' do + it 'ignores the value of one specified attribute, but fails if other attributes are different, when comparing for equivalence' do + doc1 = Nokogiri::XML("foo bar bazthings") + doc2 = Nokogiri::XML("foo bar bazthings") + expect(doc1).not_to be_equivalent_to(doc2).ignoring_attr_values( 'order' ) + end + + it 'ignores the value of one specified attribute, but succeeds if other attributes match, when comparing for equivalence' do + doc1 = Nokogiri::XML("foo bar bazthings") + doc2 = Nokogiri::XML("foo bar bazthings") + expect(doc1).to be_equivalent_to(doc2).ignoring_attr_values( 'order' ) + end + + it 'ignores the values of multiple specified attributes when comparing for equivalence' do + doc1 = Nokogiri::XML("foo bar bazthings") + doc2 = Nokogiri::XML("foo bar bazthings") + 
expect(doc1).to be_equivalent_to(doc2).ignoring_attr_values( 'order', 'status' ) + end + end + + context "(on fragments consisting of multiple nodes)" do + it "should compare all nodes" do + doc1 = "

Headline

Headline

" + doc2 = "

Headline

Headline2

" + expect(doc1).not_to be_equivalent_to(doc2) + end + end + +end diff --git a/vendor/bundle/ruby/3.2.0/gems/fakeweb-1.3.0/.autotest b/vendor/bundle/ruby/3.2.0/gems/fakeweb-1.3.0/.autotest new file mode 100644 index 000000000..b540c90ac --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/fakeweb-1.3.0/.autotest @@ -0,0 +1,5 @@ +Autotest.add_hook :initialize do |autotest| + %w(.git vendor).each do |directory| + autotest.add_exception(directory) + end +end diff --git a/vendor/bundle/ruby/3.2.0/gems/fakeweb-1.3.0/.gitignore b/vendor/bundle/ruby/3.2.0/gems/fakeweb-1.3.0/.gitignore new file mode 100644 index 000000000..63be0c5de --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/fakeweb-1.3.0/.gitignore @@ -0,0 +1,7 @@ +/doc +/rdoc +/html +/coverage +/pkg +/.idea +*.rbc diff --git a/vendor/bundle/ruby/3.2.0/gems/fakeweb-1.3.0/CHANGELOG b/vendor/bundle/ruby/3.2.0/gems/fakeweb-1.3.0/CHANGELOG new file mode 100644 index 000000000..e626352f6 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/fakeweb-1.3.0/CHANGELOG @@ -0,0 +1,215 @@ +fakeweb (1.3.0) + +* improve response header registration so you can pass an array to set a header + more than once [Myron Marston] + +* fix an exception when the response's :body option was set to nil [Chris Zingel] + +* fix that stubbed requests weren't mutating the Net::HTTP request object to set + the body and content-length, like real requests do [Chris Kampmeier] + +* add FakeWeb.last_request [Chris Kampmeier] + +* assigning a String or Regexp to FakeWeb.allow_net_connect= sets a whitelist + for outbound requests [Dan Dofter, Tim Carey-Smith, Ben Woosley] + + +fakeweb (1.2.8) + +* support Pathname objects where a filename is expected [Chris Kampmeier] + +* fix compatibility with Ruby 1.9.2 [Chris Kampmeier] + +* simplify storage of FakeWeb::VERSION [Josh Peek, Woody Peterson, Ben Woosley] + + +fakeweb (1.2.7) + +* revert to sorting query params before matching requests against regexps, + instead of the 1.2.6 behavior that tried every possible order combination; + that was factorial-time, which made matching hang for requests with long query + strings [Jason Wadsworth, David Dollar, Blaine Cook] + +* print a warning when FakeWeb is loaded before RightHttpConnection or after + Samuel, other libs that patch Net::HTTP [Chris Kampmeier, Ben Brinckerhoff] + + +fakeweb (1.2.6) + +* fix that query params in a regex would have to be sorted for it to ever match + a request URI [Chris Kampmeier, Ben Hall] + +* improve regex handling so registration with an explicit port (like + /example.com:80/) matches a request that uses an implied port + (like "http://example.com/") [Chris Kampmeier, Dan Dofter] + +* refactor URI registry to reduce duplication; now about twice as fast at + handling requests [Chris Kampmeier] + +* Add FakeWeb::VERSION so you can programmatically determine what version of + FakeWeb is loaded without using RubyGems [Chris Kampmeier, Chris Wanstrath] + + +fakeweb (1.2.5) + +* fix handling of userinfo strings that contain percent-encoded unsafe + characters [Chris Kampmeier, Ken Mayer] + +* fix that exact matches against strings/URIs with the :any method had a lower + precedence than regex matches using a real HTTP method (exact matches now + always take precedence) [Chris Kampmeier] + +* change request handling to raise an exception when more than one registered + regex matches a request URI [Chris Kampmeier] + + +fakeweb (1.2.4) + +* add experimental support for matching URIs via regular expressions + [Jacqui Maher, Tiago Albineli Motta, Peter Wagene] + +* fix 
an exception when registering with the :response option and a string that + is the same as the name of a directory in the current path [Chris Kampmeier] + +* DEPRECATION: Calling FakeWeb.register_uri with a :string or :file option is + now deprecated. Both options have been replaced with a unified :body option, + since they supply the response body (as opposed to :response, which supplies + the full response including headers) [Chris Kampmeier] + +* add support for specifying HTTP headers as options to FakeWeb.register_uri + when using the :string or :file response types, since those methods only + specify a response body [David Michael, Chris Kampmeier] + +* DEPRECATION: Calling FakeWeb.register_uri and FakeWeb.registered_uri? without + an HTTP method as the first argument is now deprecated. To match against any + HTTP method (the pre-1.2.0 behavior), use :any [Chris Kampmeier] + + +fakeweb (1.2.3) + +* fix the #http_version of :file and :string responses, which was returning the + request URI instead of something sensible like "1.0" [Chris Kampmeier] + +* add method aliases in the Net::HTTP patch to eliminate warnings when running + with -w [Joshua Clingenpeel] + +* fix that removing the redefinition of OpenURI::HTTPError in 1.2.0 caused + :exception responses to raise when OpenURI isn't available [Chris Kampmeier] + +* fix registering an :exception response with classes that require arguments for + instantiation, like Interrupt's subclasses [Chris Kampmeier] + + +fakeweb (1.2.2) + +* fix that HTTP Digest and OAuth requests could raise URI::InvalidURIErrors + [Bill Kocik, Chris Kampmeier] + + +fakeweb (1.2.1) + +* fix that query parameters are handled correctly when registering with a URI + object [Anselmo Alves, Chris Kampmeier] + +* fix an exception when registering with the :response option and a string + containing "\0" [Jonathan Baudanza, Chris Kampmeier] + +* fix that trailing slashes were considered significant for requests to the root + of a domain [Chris Kampmeier] + +* add support for HTTP basic authentication via userinfo strings in URIs + [Michael Bleigh] + + +fakeweb (1.2.0) + +* add lib/fakeweb.rb so you can require "fakeweb" as well [Chris Kampmeier] + +* fix compatibility with Ruby 1.9.1 [Chris Kampmeier] + +* fix that newlines in file-based responses could be doubled in the response + object's body [Mark Menard, Chris Kampmeier] + +* fix unnecessary munging of the transfer-encoding header, which improves + compatibility with mechanize [Mark Menard] + +* fix a test and the RCov dependency to be compatible with JRuby [Mark Menard] + +* remove an unnecessary redefinition of OpenURI::HTTPError [Josh Nichols] + +* rearrange implementation code into separate files, one per class [Josh Nichols] + +* fix a bug where FakeWeb.response_for would raise if the request wasn't + registered [Chris Kampmeier] + +* add HTTP method support, so FakeWeb takes both the URI and method into + account for registration, requests, and responses. Backwards-compatible with + the old method signatures, which didn't have a method param. [Chris Kampmeier] + +* start work on Ruby 1.9 compatibility [Chris Kampmeier] + +* add FakeWeb.allow_net_connect= to enable/disable the pass-through to + Net::HTTP for unregistered URIs [Mislav Marohnić, Chris Kampmeier] + +* remove setup.rb, since most people use RubyGems [Mislav Marohnić] + +* fix that 'http://example.com/?' 
(empty query) matches a registered + 'http://example.com/', and vice-versa [Mislav Marohnić] + +* improve the test suite to not rely on an internet connection [Chris Kampmeier] + +* use `rake test` instead of `rake tests` [Josh Nichols] + +* fix an incompatibility with Ruby 1.8.6 p36 where you'd get "Errno::EINTR: + Interrupted system call" exceptions in Socket#sysread for any non-faked + request [Chris Kampmeier] + +* response rotation: you can now optionally call FakeWeb.register_uri with an + array of options hashes; these are used, in order, to respond to + repeated requests (to repeat a response more than once before rotating, use + the :times option). Once you run out of responses, further requests always + receive the last response. [Michael Shapiro] + +* add support for Net::HTTP's undocumented full-URI request style (fixes + URI::InvalidURIErrors that you might see in older libraries) [Chris Kampmeier] + +* sort query params before storing internally, so that + http://example.com/?a=1&b=2 and http://example.com/?b=2&a=1 are considered the + same URL (although this is technically incorrect, it's much more + convenient--most web apps work that way, and Net::HTTP's use of a hash to pass + query params means that the order in which FakeWeb stores them can be + unpredictable) [Chris Kampmeier] + +* add support for ports in URLs, so that http://example.com/ and + http://example.com:3000/ are not the same [Chris Kampmeier] + +* fix for non-faked SSL requests failing with "Unable to create local socket" + [Chris Kampmeier] + +* update Rakefile to fix warning about deprecated code [Chris Kampmeier] + + +fakeweb (1.1.2) + +* add required dependencies to GemSpec to ensure that tests pass in firebrigade + (http://firebrigade.seattlerb.org/) [Blaine Cook] + + +fakeweb (1.1.1) + +* fix for non-existence of :string method on File as presented by open-uri + [Blaine Cook] + +* fix for curl example test - google redirects to ccTLDs for those outside US + [Blaine Cook] + + +fakeweb (1.1.0) + +* update code to correspond to ruby 1.8.4 (breaks compatibility with ruby 1.8.2) + [Blaine Cook] + + +fakeweb (1.0.0) + + * initial import [Blaine Cook] diff --git a/vendor/bundle/ruby/3.2.0/gems/fakeweb-1.3.0/LICENSE.txt b/vendor/bundle/ruby/3.2.0/gems/fakeweb-1.3.0/LICENSE.txt new file mode 100644 index 000000000..ecae84703 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/fakeweb-1.3.0/LICENSE.txt @@ -0,0 +1,19 @@ +Copyright 2006-2010 Blaine Cook, Chris Kampmeier, and other contributors + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. 
diff --git a/vendor/bundle/ruby/3.2.0/gems/fakeweb-1.3.0/README.rdoc b/vendor/bundle/ruby/3.2.0/gems/fakeweb-1.3.0/README.rdoc new file mode 100644 index 000000000..33115325b --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/fakeweb-1.3.0/README.rdoc @@ -0,0 +1,189 @@ += FakeWeb + +FakeWeb is a helper for faking web requests in Ruby. It works at a global +level, without modifying code or writing extensive stubs. + + +== Installation + + gem install fakeweb + +Note: the gem was previously available as +FakeWeb+ (capital letters), but now +all versions are simply registered as +fakeweb+. If you have any old +FakeWeb+ +gems lying around, remove them: gem uninstall FakeWeb + + +== Help and discussion + +RDocs for the current release are available at http://fakeweb.rubyforge.org. + +There's a mailing list for questions and discussion at +http://groups.google.com/group/fakeweb-users. + +The main source repository is http://github.com/chrisk/fakeweb. + +== Examples + +Start by requiring FakeWeb: + + require 'fakeweb' + +=== Registering basic string responses + + FakeWeb.register_uri(:get, "http://example.com/test1", :body => "Hello World!") + + Net::HTTP.get(URI.parse("http://example.com/test1")) + => "Hello World!" + + Net::HTTP.get(URI.parse("http://example.com/test2")) + => FakeWeb is bypassed and the response from a real request is returned + +You can also call register_uri with a regular expression, to match +more than one URI. + + FakeWeb.register_uri(:get, %r|http://example\.com/|, :body => "Hello World!") + + Net::HTTP.get(URI.parse("http://example.com/test3")) + => "Hello World!" + +=== Replaying a recorded response + + page = `curl -is http://www.google.com/` + FakeWeb.register_uri(:get, "http://www.google.com/", :response => page) + + Net::HTTP.get(URI.parse("http://www.google.com/")) + # => Full response, including headers + +=== Adding a custom status to the response + + FakeWeb.register_uri(:get, "http://example.com/", :body => "Nothing to be found 'round here", + :status => ["404", "Not Found"]) + + Net::HTTP.start("example.com") do |req| + response = req.get("/") + response.code # => "404" + response.message # => "Not Found" + response.body # => "Nothing to be found 'round here" + end + +=== Responding to any HTTP method + + FakeWeb.register_uri(:any, "http://example.com", :body => "response for any HTTP method") + +If you use the :any symbol, the URI you specify will be completely +stubbed out (regardless of the HTTP method of the request). This can be useful +for RPC-style services, where the HTTP method isn't significant. (Older +versions of FakeWeb always behaved like this, and didn't accept the first ++method+ argument above; this syntax is now deprecated.) + +=== Rotating responses + +You can optionally call FakeWeb.register_uri with an array of options +hashes; these are used, in order, to respond to repeated requests. Once you run +out of responses, further requests always receive the last response. (You can +also send a response more than once before rotating, by specifying a +:times option for that response.) 
+ + FakeWeb.register_uri(:delete, "http://example.com/posts/1", + [{:body => "Post 1 deleted.", :status => ["200", "OK"]}, + {:body => "Post not found", :status => ["404", "Not Found"]}]) + + Net::HTTP.start("example.com") do |req| + req.delete("/posts/1").body # => "Post 1 deleted" + req.delete("/posts/1").body # => "Post not found" + req.delete("/posts/1").body # => "Post not found" + end + +=== Using HTTP basic authentication + +You can fake requests that use basic authentication by adding +userinfo+ strings +to your URIs: + + FakeWeb.register_uri(:get, "http://example.com/secret", :body => "Unauthorized", :status => ["401", "Unauthorized"]) + FakeWeb.register_uri(:get, "http://user:pass@example.com/secret", :body => "Authorized") + + Net::HTTP.start("example.com") do |http| + req = Net::HTTP::Get.new("/secret") + http.request(req) # => "Unauthorized" + req.basic_auth("user", "pass") + http.request(req) # => "Authorized" + end + +=== Clearing registered URIs + +The FakeWeb registry is a singleton that lasts for the duration of your program, +maintaining every fake response you register. If needed, you can clean out the +registry and remove all registered URIs: + + FakeWeb.clean_registry + +=== Blocking all real requests + +When you're using FakeWeb to replace _all_ of your requests, it's useful to +catch when requests are made for unregistered URIs (unlike the default +behavior, which is to pass those requests through to Net::HTTP as usual). + + FakeWeb.allow_net_connect = false + Net::HTTP.get(URI.parse("http://example.com/")) + => raises FakeWeb::NetConnectNotAllowedError + + FakeWeb.allow_net_connect = true + Net::HTTP.get(URI.parse("http://example.com/")) + => FakeWeb is bypassed and the response from a real request is returned + +It's recommended that you set FakeWeb.allow_net_connect = false in the +setup for your tests. + +==== Allowing requests to a specific server + +If you want to prevent your tests from hitting the internet while allowing +access to a specific server for integration testing, you can assign a URI or ++Regexp+ to be used as a whitelist for outbound requests: + + FakeWeb.allow_net_connect = %r[^https?://localhost] + Net::HTTP.get(URI.parse("http://localhost/path")) # => allowed + Net::HTTP.get(URI.parse("http://example.com/")) # => raises FakeWeb::NetConnectNotAllowedError + +=== Specifying HTTP response headers + +When you register a response using the :body option, you're only +setting the body of the response. If you want to add headers to these responses, +simply add the header as an option to +register_uri+: + + FakeWeb.register_uri(:get, "http://example.com/hello.txt", :body => "Hello", :content_type => "text/plain") + +This sets the "Content-Type" header in the response. + +=== Checking the last request + +It's often useful to retrieve the last request made by your code, so you can +write tests for its content. FakeWeb keeps track of the last request, whether it +was stubbed or not: + + Net::HTTP.get(URI.parse("http://example.com")) + FakeWeb.last_request # => Net::HTTP::Get request object + +== More info + +FakeWeb lets you decouple your test environment from live services without +modifying code or writing extensive stubs. + +In addition to the conceptual advantage of having idempotent request +behaviour, FakeWeb makes tests run faster than if they were made to remote (or +even local) web servers. 
It also makes it possible to run tests without a +network connection or in situations where the server is behind a firewall or +has host-based access controls. + +FakeWeb works with anything based on Net::HTTP--both higher-level wrappers, +like OpenURI, as well as a ton of libraries for popular web services. + + +== Known Issues + +* Request bodies are ignored, including PUT and POST parameters. If you need + different responses for different request bodies, you need to request + different URLs, and register different responses for each. (Query strings are + fully supported, though.) We're currently considering how the API should + change to add support for request bodies in 1.3.0. Your input would be really + helpful: see http://groups.google.com/group/fakeweb-users/browse_thread/thread/44d190a6b12e4273 + for a discussion of some different options. Thanks! diff --git a/vendor/bundle/ruby/3.2.0/gems/fakeweb-1.3.0/Rakefile b/vendor/bundle/ruby/3.2.0/gems/fakeweb-1.3.0/Rakefile new file mode 100644 index 000000000..e3c5298c6 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/fakeweb-1.3.0/Rakefile @@ -0,0 +1,67 @@ +require 'rubygems' +require 'rake' + +version = '1.3.0' + +begin + require 'jeweler' + Jeweler::Tasks.new do |gem| + gem.name = "fakeweb" + gem.rubyforge_project = "fakeweb" + gem.version = version + gem.summary = "A tool for faking responses to HTTP requests" + gem.description = "FakeWeb is a helper for faking web requests in Ruby. It works at a global level, without modifying code or writing extensive stubs." + gem.email = ["chris@kampers.net", "romeda@gmail.com"] + gem.authors = ["Chris Kampmeier", "Blaine Cook"] + gem.homepage = "http://github.com/chrisk/fakeweb" + gem.add_development_dependency "mocha", ">= 0.9.5" + end +rescue LoadError + puts "Jeweler (or a dependency) not available. Install it with: gem install jeweler" +end + + +require 'rake/testtask' +Rake::TestTask.new(:test) do |test| + test.test_files = FileList["test/**/*.rb"].exclude("test/test_helper.rb", "test/vendor") + test.libs << "test" + test.verbose = false + test.warning = true +end + +task :default => [:check_dependencies, :test] + + +begin + require 'rcov/rcovtask' + Rcov::RcovTask.new do |t| + t.test_files = FileList["test/**/*.rb"].exclude("test/test_helper.rb", "test/vendor") + t.libs << "test" + t.rcov_opts << "--sort coverage" + t.rcov_opts << "--exclude gems" + t.warning = true + end +rescue LoadError + print "rcov support disabled " + if RUBY_PLATFORM =~ /java/ + puts "(running under JRuby)" + else + puts "(install RCov to enable the `rcov` task)" + end +end + + +begin + require 'sdoc' + require 'rdoc/task' + Rake::RDocTask.new do |rdoc| + rdoc.main = "README.rdoc" + rdoc.rdoc_files.include("README.rdoc", "CHANGELOG", "LICENSE.txt", "lib/*.rb") + rdoc.title = "FakeWeb #{version} API Documentation" + rdoc.rdoc_dir = "doc" + rdoc.template = "direct" + rdoc.options << "--line-numbers" << "--show-hash" << "--charset=utf-8" + end +rescue LoadError + puts "SDoc (or a dependency) not available. 
Install it with: gem install sdoc" +end diff --git a/vendor/bundle/ruby/3.2.0/gems/fakeweb-1.3.0/fakeweb.gemspec b/vendor/bundle/ruby/3.2.0/gems/fakeweb-1.3.0/fakeweb.gemspec new file mode 100644 index 000000000..39b23f001 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/fakeweb-1.3.0/fakeweb.gemspec @@ -0,0 +1,126 @@ +# Generated by jeweler +# DO NOT EDIT THIS FILE DIRECTLY +# Instead, edit Jeweler::Tasks in Rakefile, and run the gemspec command +# -*- encoding: utf-8 -*- + +Gem::Specification.new do |s| + s.name = %q{fakeweb} + s.version = "1.3.0" + + s.required_rubygems_version = Gem::Requirement.new(">= 0") if s.respond_to? :required_rubygems_version= + s.authors = ["Chris Kampmeier", "Blaine Cook"] + s.date = %q{2010-08-22} + s.description = %q{FakeWeb is a helper for faking web requests in Ruby. It works at a global level, without modifying code or writing extensive stubs.} + s.email = ["chris@kampers.net", "romeda@gmail.com"] + s.extra_rdoc_files = [ + "LICENSE.txt", + "README.rdoc" + ] + s.files = [ + ".autotest", + ".gitignore", + "CHANGELOG", + "LICENSE.txt", + "README.rdoc", + "Rakefile", + "fakeweb.gemspec", + "lib/fake_web.rb", + "lib/fake_web/ext/net_http.rb", + "lib/fake_web/registry.rb", + "lib/fake_web/responder.rb", + "lib/fake_web/response.rb", + "lib/fake_web/stub_socket.rb", + "lib/fake_web/utility.rb", + "lib/fakeweb.rb", + "test/fixtures/google_response_from_curl", + "test/fixtures/google_response_with_transfer_encoding", + "test/fixtures/google_response_without_transfer_encoding", + "test/fixtures/test_example.txt", + "test/fixtures/test_txt_file", + "test/test_allow_net_connect.rb", + "test/test_deprecations.rb", + "test/test_fake_authentication.rb", + "test/test_fake_web.rb", + "test/test_fake_web_open_uri.rb", + "test/test_helper.rb", + "test/test_last_request.rb", + "test/test_missing_open_uri.rb", + "test/test_missing_pathname.rb", + "test/test_other_net_http_libraries.rb", + "test/test_precedence.rb", + "test/test_query_string.rb", + "test/test_regexes.rb", + "test/test_response_headers.rb", + "test/test_trailing_slashes.rb", + "test/test_utility.rb", + "test/vendor/right_http_connection-1.2.4/History.txt", + "test/vendor/right_http_connection-1.2.4/Manifest.txt", + "test/vendor/right_http_connection-1.2.4/README.txt", + "test/vendor/right_http_connection-1.2.4/Rakefile", + "test/vendor/right_http_connection-1.2.4/lib/net_fix.rb", + "test/vendor/right_http_connection-1.2.4/lib/right_http_connection.rb", + "test/vendor/right_http_connection-1.2.4/setup.rb", + "test/vendor/samuel-0.2.1/.document", + "test/vendor/samuel-0.2.1/.gitignore", + "test/vendor/samuel-0.2.1/LICENSE", + "test/vendor/samuel-0.2.1/README.rdoc", + "test/vendor/samuel-0.2.1/Rakefile", + "test/vendor/samuel-0.2.1/VERSION", + "test/vendor/samuel-0.2.1/lib/samuel.rb", + "test/vendor/samuel-0.2.1/lib/samuel/net_http.rb", + "test/vendor/samuel-0.2.1/lib/samuel/request.rb", + "test/vendor/samuel-0.2.1/samuel.gemspec", + "test/vendor/samuel-0.2.1/test/request_test.rb", + "test/vendor/samuel-0.2.1/test/samuel_test.rb", + "test/vendor/samuel-0.2.1/test/test_helper.rb", + "test/vendor/samuel-0.2.1/test/thread_test.rb" + ] + s.homepage = %q{http://github.com/chrisk/fakeweb} + s.rdoc_options = ["--charset=UTF-8"] + s.require_paths = ["lib"] + s.rubyforge_project = %q{fakeweb} + s.rubygems_version = %q{1.3.7} + s.summary = %q{A tool for faking responses to HTTP requests} + s.test_files = [ + "test/test_allow_net_connect.rb", + "test/test_deprecations.rb", + "test/test_fake_authentication.rb", + 
"test/test_fake_web.rb", + "test/test_fake_web_open_uri.rb", + "test/test_helper.rb", + "test/test_last_request.rb", + "test/test_missing_open_uri.rb", + "test/test_missing_pathname.rb", + "test/test_other_net_http_libraries.rb", + "test/test_precedence.rb", + "test/test_query_string.rb", + "test/test_regexes.rb", + "test/test_response_headers.rb", + "test/test_trailing_slashes.rb", + "test/test_utility.rb", + "test/vendor/right_http_connection-1.2.4/lib/net_fix.rb", + "test/vendor/right_http_connection-1.2.4/lib/right_http_connection.rb", + "test/vendor/right_http_connection-1.2.4/setup.rb", + "test/vendor/samuel-0.2.1/lib/samuel/net_http.rb", + "test/vendor/samuel-0.2.1/lib/samuel/request.rb", + "test/vendor/samuel-0.2.1/lib/samuel.rb", + "test/vendor/samuel-0.2.1/test/request_test.rb", + "test/vendor/samuel-0.2.1/test/samuel_test.rb", + "test/vendor/samuel-0.2.1/test/test_helper.rb", + "test/vendor/samuel-0.2.1/test/thread_test.rb" + ] + + if s.respond_to? :specification_version then + current_version = Gem::Specification::CURRENT_SPECIFICATION_VERSION + s.specification_version = 3 + + if Gem::Version.new(Gem::VERSION) >= Gem::Version.new('1.2.0') then + s.add_development_dependency(%q, [">= 0.9.5"]) + else + s.add_dependency(%q, [">= 0.9.5"]) + end + else + s.add_dependency(%q, [">= 0.9.5"]) + end +end + diff --git a/vendor/bundle/ruby/3.2.0/gems/fakeweb-1.3.0/lib/fake_web.rb b/vendor/bundle/ruby/3.2.0/gems/fakeweb-1.3.0/lib/fake_web.rb new file mode 100644 index 000000000..77fbdaec4 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/fakeweb-1.3.0/lib/fake_web.rb @@ -0,0 +1,215 @@ +require 'singleton' + +require 'fake_web/ext/net_http' +require 'fake_web/registry' +require 'fake_web/response' +require 'fake_web/responder' +require 'fake_web/stub_socket' +require 'fake_web/utility' + +FakeWeb::Utility.record_loaded_net_http_replacement_libs +FakeWeb::Utility.puts_warning_for_net_http_around_advice_libs_if_needed + +module FakeWeb + + # Returns the version string for the copy of FakeWeb you have loaded. + VERSION = '1.3.0' + + # Resets the FakeWeb Registry. This will force all subsequent web requests to + # behave as real requests. + def self.clean_registry + Registry.instance.clean_registry + end + + # Enables or disables real HTTP connections for requests that don't match + # registered URIs. + # + # If you set FakeWeb.allow_net_connect = false and subsequently try + # to make a request to a URI you haven't registered with #register_uri, a + # NetConnectNotAllowedError will be raised. This is handy when you want to + # make sure your tests are self-contained, or want to catch the scenario + # when a URI is changed in implementation code without a corresponding test + # change. + # + # When FakeWeb.allow_net_connect = true (the default), requests to + # URIs not stubbed with FakeWeb are passed through to Net::HTTP. + # + # If you assign a +String+, +URI+, or +Regexp+ object, unstubbed requests + # will be allowed if they match that value. This is useful when you want to + # allow access to a local server for integration testing, while still + # preventing your tests from using the internet. + def self.allow_net_connect=(allowed) + case allowed + when String, URI, Regexp + @allow_all_connections = false + Registry.instance.register_passthrough_uri(allowed) + else + @allow_all_connections = allowed + Registry.instance.remove_passthrough_uri + end + end + + # Enable pass-through to Net::HTTP by default. 
+ self.allow_net_connect = true + + # Returns +true+ if requests to URIs not registered with FakeWeb are passed + # through to Net::HTTP for normal processing (the default). Returns +false+ + # if an exception is raised for these requests. + # + # If you've assigned a +String+, +URI+, or +Regexp+ to + # FakeWeb.allow_net_connect=, you must supply a URI to check + # against that filter. Otherwise, an ArgumentError will be raised. + def self.allow_net_connect?(uri = nil) + if Registry.instance.passthrough_uri_map.any? + raise ArgumentError, "You must supply a URI to test" if uri.nil? + Registry.instance.passthrough_uri_matches?(uri) + else + @allow_all_connections + end + end + + # This exception is raised if you set FakeWeb.allow_net_connect = + # false and subsequently try to make a request to a URI you haven't + # stubbed. + class NetConnectNotAllowedError < StandardError; end; + + # This exception is raised if a Net::HTTP request matches more than one of + # the stubs you've registered. To fix the problem, remove a duplicate + # registration or disambiguate any regular expressions by making them more + # specific. + class MultipleMatchingURIsError < StandardError; end; + + # call-seq: + # FakeWeb.register_uri(method, uri, options) + # + # Register requests using the HTTP method specified by the symbol +method+ + # for +uri+ to be handled according to +options+. If you specify the method + # :any, the response will be registered for any request for +uri+. + # +uri+ can be a +String+, +URI+, or +Regexp+ object. +options+ must be either + # a +Hash+ or an +Array+ of +Hashes+ (see below), which must contain one of + # these two keys: + # + # :body:: + # A string which is used as the body of the response. If the string refers + # to a valid filesystem path, the contents of that file will be read and used + # as the body of the response instead. (This used to be two options, + # :string and :file, respectively. These are now deprecated.) + # :response:: + # Either a Net::HTTPResponse, an +IO+, or a +String+ which is used + # as the full response for the request. + # + # The easier way by far is to pass the :response option to + # +register_uri+ as a +String+ or an (open for reads) +IO+ object which + # will be used as the complete HTTP response, including headers and body. + # If the string points to a readable file, this file will be used as the + # content for the request. + # + # To obtain a complete response document, you can use the +curl+ command, + # like so: + # + # curl -i http://example.com > response_from_example.com + # + # which can then be used in your test environment like so: + # + # FakeWeb.register_uri(:get, "http://example.com", :response => "response_from_example.com") + # + # See the Net::HTTPResponse + # documentation[http://ruby-doc.org/stdlib/libdoc/net/http/rdoc/classes/Net/HTTPResponse.html] + # for more information on creating custom response objects. + # + # +options+ may also be an +Array+ containing a list of the above-described + # +Hash+. In this case, FakeWeb will rotate through each response. You can + # optionally repeat a response more than once before rotating: + # + # :times:: + # The number of times this response will be used before moving on to the + # next one. The last response will be repeated indefinitely, regardless of + # its :times parameter. + # + # Two optional arguments are also accepted: + # + # :status:: + # Passing :status as a two-value array will set the response code + # and message. The defaults are 200 and OK, respectively.
+ # Example: + # FakeWeb.register_uri(:get, "http://example.com", :body => "Go away!", :status => [404, "Not Found"]) + # :exception:: + # The argument passed via :exception will be raised when the + # specified URL is requested. Any +Exception+ class is valid. Example: + # FakeWeb.register_uri(:get, "http://example.com", :exception => Net::HTTPError) + # + # If you're using the :body response type, you can pass additional + # options to specify the HTTP headers to be used in the response. Example: + # + # FakeWeb.register_uri(:get, "http://example.com/index.txt", :body => "Hello", :content_type => "text/plain") + # + # You can also pass an array of header values to include a header in the + # response more than once: + # + # FakeWeb.register_uri(:get, "http://example.com", :set_cookie => ["name=value", "example=1"]) + def self.register_uri(*args) + case args.length + when 3 + Registry.instance.register_uri(*args) + when 2 + print_missing_http_method_deprecation_warning(*args) + Registry.instance.register_uri(:any, *args) + else + raise ArgumentError.new("wrong number of arguments (#{args.length} for 3)") + end + end + + # call-seq: + # FakeWeb.response_for(method, uri) + # + # Returns the faked Net::HTTPResponse object associated with +method+ and +uri+. + def self.response_for(*args, &block) #:nodoc: :yields: response + case args.length + when 2 + Registry.instance.response_for(*args, &block) + when 1 + print_missing_http_method_deprecation_warning(*args) + Registry.instance.response_for(:any, *args, &block) + else + raise ArgumentError.new("wrong number of arguments (#{args.length} for 2)") + end + end + + # call-seq: + # FakeWeb.registered_uri?(method, uri) + # + # Returns true if a +method+ request for +uri+ is registered with FakeWeb. + # Specify a method of :any to check against all HTTP methods. + def self.registered_uri?(*args) + case args.length + when 2 + Registry.instance.registered_uri?(*args) + when 1 + print_missing_http_method_deprecation_warning(*args) + Registry.instance.registered_uri?(:any, *args) + else + raise ArgumentError.new("wrong number of arguments (#{args.length} for 2)") + end + end + + # Returns the request object from the last request made via Net::HTTP. + def self.last_request + @last_request + end + + def self.last_request=(request) #:nodoc: + @last_request = request + end + + private + + def self.print_missing_http_method_deprecation_warning(*args) + method = caller.first.match(/`(.*?)'/)[1] + new_args = args.map { |a| a.inspect }.unshift(":any") + new_args.last.gsub!(/^\{|\}$/, "").gsub!("=>", " => ") if args.last.is_a?(Hash) + $stderr.puts + $stderr.puts "Deprecation warning: FakeWeb requires an HTTP method argument (or use :any). 
Try this:" + $stderr.puts " FakeWeb.#{method}(#{new_args.join(', ')})" + $stderr.puts "Called at #{caller[1]}" + end +end diff --git a/vendor/bundle/ruby/3.2.0/gems/fakeweb-1.3.0/lib/fake_web/ext/net_http.rb b/vendor/bundle/ruby/3.2.0/gems/fakeweb-1.3.0/lib/fake_web/ext/net_http.rb new file mode 100644 index 000000000..4ff3e9a10 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/fakeweb-1.3.0/lib/fake_web/ext/net_http.rb @@ -0,0 +1,72 @@ +require 'net/http' +require 'net/https' +require 'stringio' + +module Net #:nodoc: all + + class BufferedIO + def initialize_with_fakeweb(io, debug_output = nil) + @read_timeout = 60 + @rbuf = '' + @debug_output = debug_output + + @io = case io + when Socket, OpenSSL::SSL::SSLSocket, IO + io + when String + if !io.include?("\0") && File.exists?(io) && !File.directory?(io) + File.open(io, "r") + else + StringIO.new(io) + end + end + raise "Unable to create local socket" unless @io + end + alias_method :initialize_without_fakeweb, :initialize + alias_method :initialize, :initialize_with_fakeweb + end + + class HTTP + class << self + def socket_type_with_fakeweb + FakeWeb::StubSocket + end + alias_method :socket_type_without_fakeweb, :socket_type + alias_method :socket_type, :socket_type_with_fakeweb + end + + def request_with_fakeweb(request, body = nil, &block) + FakeWeb.last_request = request + + uri = FakeWeb::Utility.request_uri_as_string(self, request) + method = request.method.downcase.to_sym + + if FakeWeb.registered_uri?(method, uri) + @socket = Net::HTTP.socket_type.new + FakeWeb::Utility.produce_side_effects_of_net_http_request(request, body) + FakeWeb.response_for(method, uri, &block) + elsif FakeWeb.allow_net_connect?(uri) + connect_without_fakeweb + request_without_fakeweb(request, body, &block) + else + uri = FakeWeb::Utility.strip_default_port_from_uri(uri) + raise FakeWeb::NetConnectNotAllowedError, + "Real HTTP connections are disabled. Unregistered request: #{request.method} #{uri}" + end + end + alias_method :request_without_fakeweb, :request + alias_method :request, :request_with_fakeweb + + + def connect_with_fakeweb + unless @@alredy_checked_for_net_http_replacement_libs ||= false + FakeWeb::Utility.puts_warning_for_net_http_replacement_libs_if_needed + @@alredy_checked_for_net_http_replacement_libs = true + end + nil + end + alias_method :connect_without_fakeweb, :connect + alias_method :connect, :connect_with_fakeweb + end + +end diff --git a/vendor/bundle/ruby/3.2.0/gems/fakeweb-1.3.0/lib/fake_web/registry.rb b/vendor/bundle/ruby/3.2.0/gems/fakeweb-1.3.0/lib/fake_web/registry.rb new file mode 100644 index 000000000..9a4a34e68 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/fakeweb-1.3.0/lib/fake_web/registry.rb @@ -0,0 +1,127 @@ +module FakeWeb + class Registry #:nodoc: + include Singleton + + attr_accessor :uri_map, :passthrough_uri_map + + def initialize + clean_registry + end + + def clean_registry + self.uri_map = Hash.new { |hash, key| hash[key] = {} } + end + + def register_uri(method, uri, options) + uri_map[normalize_uri(uri)][method] = [*[options]].flatten.collect do |option| + FakeWeb::Responder.new(method, uri, option, option[:times]) + end + end + + def registered_uri?(method, uri) + !responders_for(method, uri).empty? + end + + def response_for(method, uri, &block) + responders = responders_for(method, uri) + return nil if responders.empty? 
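+      # Response rotation: default to the last registered responder, but prefer the
+      # first responder that still has :times uses remaining, decrementing its
+      # counter as it is served. Once every counter is exhausted, the last
+      # response repeats indefinitely.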
+ + next_responder = responders.last + responders.each do |responder| + if responder.times and responder.times > 0 + responder.times -= 1 + next_responder = responder + break + end + end + + next_responder.response(&block) + end + + def register_passthrough_uri(uri) + self.passthrough_uri_map = {normalize_uri(uri) => {:any => true}} + end + + def remove_passthrough_uri + self.passthrough_uri_map = {} + end + + def passthrough_uri_matches?(uri) + uri = normalize_uri(uri) + uri_map_matches(passthrough_uri_map, :any, uri, URI) || + uri_map_matches(passthrough_uri_map, :any, uri, Regexp) + end + + private + + def responders_for(method, uri) + uri = normalize_uri(uri) + + uri_map_matches(uri_map, method, uri, URI) || + uri_map_matches(uri_map, :any, uri, URI) || + uri_map_matches(uri_map, method, uri, Regexp) || + uri_map_matches(uri_map, :any, uri, Regexp) || + [] + end + + def uri_map_matches(map, method, uri, type_to_check = URI) + uris_to_check = variations_of_uri_as_strings(uri) + + matches = map.select { |registered_uri, method_hash| + registered_uri.is_a?(type_to_check) && method_hash.has_key?(method) + }.select { |registered_uri, method_hash| + if type_to_check == URI + uris_to_check.include?(registered_uri.to_s) + elsif type_to_check == Regexp + uris_to_check.any? { |u| u.match(registered_uri) } + end + } + + if matches.size > 1 + raise MultipleMatchingURIsError, + "More than one registered URI matched this request: #{method.to_s.upcase} #{uri}" + end + + matches.map { |_, method_hash| method_hash[method] }.first + end + + + def variations_of_uri_as_strings(uri_object) + normalized_uri = normalize_uri(uri_object.dup) + normalized_uri_string = normalized_uri.to_s + + variations = [normalized_uri_string] + + # if the port is implied in the original, add a copy with an explicit port + if normalized_uri.default_port == normalized_uri.port + variations << normalized_uri_string.sub( + /#{Regexp.escape(normalized_uri.request_uri)}$/, + ":#{normalized_uri.port}#{normalized_uri.request_uri}") + end + + variations + end + + def normalize_uri(uri) + return uri if uri.is_a?(Regexp) + normalized_uri = + case uri + when URI then uri + when String + uri = 'http://' + uri unless uri.match('^https?://') + URI.parse(uri) + end + normalized_uri.query = sort_query_params(normalized_uri.query) + normalized_uri.normalize + end + + def sort_query_params(query) + if query.nil? || query.empty? + nil + else + query.split('&').sort.join('&') + end + end + + end +end diff --git a/vendor/bundle/ruby/3.2.0/gems/fakeweb-1.3.0/lib/fake_web/responder.rb b/vendor/bundle/ruby/3.2.0/gems/fakeweb-1.3.0/lib/fake_web/responder.rb new file mode 100644 index 000000000..573fec3d4 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/fakeweb-1.3.0/lib/fake_web/responder.rb @@ -0,0 +1,122 @@ +module FakeWeb + class Responder #:nodoc: + + attr_accessor :method, :uri, :options, :times + KNOWN_OPTIONS = [:body, :exception, :response, :status].freeze + + def initialize(method, uri, options, times) + self.method = method + self.uri = uri + self.options = options + self.times = times ? times : 1 + + if options.has_key?(:file) || options.has_key?(:string) + print_file_string_options_deprecation_warning + options[:body] = options.delete(:file) || options.delete(:string) + end + end + + def response(&block) + if has_baked_response? 
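+        # A prebaked :response was registered (a Net::HTTPResponse, or a string/IO
+        # holding a raw HTTP response); replay it instead of building a response
+        # from the :body/:status/header options.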
+ response = baked_response + else + code, msg = meta_information + response = Net::HTTPResponse.send(:response_class, code.to_s).new("1.0", code.to_s, msg) + response.instance_variable_set(:@body, body) + headers_extracted_from_options.each do |name, value| + if value.respond_to?(:each) + value.each { |v| response.add_field(name, v) } + else + response[name] = value + end + end + end + + response.instance_variable_set(:@read, true) + response.extend FakeWeb::Response + + optionally_raise(response) + + yield response if block_given? + + response + end + + private + + def headers_extracted_from_options + options.reject {|name, _| KNOWN_OPTIONS.include?(name) }.map { |name, value| + [name.to_s.split("_").map { |segment| segment.capitalize }.join("-"), value] + } + end + + def body + return '' if options[:body].nil? + + options[:body] = options[:body].to_s if defined?(Pathname) && options[:body].is_a?(Pathname) + + if !options[:body].include?("\0") && File.exists?(options[:body]) && !File.directory?(options[:body]) + File.read(options[:body]) + else + options[:body] + end + end + + def baked_response + return options[:response] if options[:response].is_a?(Net::HTTPResponse) + + if options[:response].is_a?(String) || (defined?(Pathname) && options[:response].is_a?(Pathname)) + socket = Net::BufferedIO.new(options[:response].to_s) + r = Net::HTTPResponse.read_new(socket) + + # Store the original transfer-encoding + saved_transfer_encoding = r.instance_eval { + @header['transfer-encoding'] if @header.key?('transfer-encoding') + } + + # Read the body of response + r.instance_eval { @header['transfer-encoding'] = nil } + r.reading_body(socket, true) {} + + # Delete the transfer-encoding key from r.@header if there wasn't one; + # otherwise, restore the saved_transfer_encoding + if saved_transfer_encoding.nil? + r.instance_eval { @header.delete('transfer-encoding') } + else + r.instance_eval { @header['transfer-encoding'] = saved_transfer_encoding } + end + r + else + raise StandardError, "Handler unimplemented for response #{options[:response]}" + end + end + + def has_baked_response? + options.has_key?(:response) + end + + def optionally_raise(response) + return unless options.has_key?(:exception) + + case options[:exception].to_s + when "Net::HTTPError", "OpenURI::HTTPError" + raise options[:exception].new('Exception from FakeWeb', response) + else + raise options[:exception].new('Exception from FakeWeb') + end + end + + def meta_information + options.has_key?(:status) ? options[:status] : [200, 'OK'] + end + + def print_file_string_options_deprecation_warning + which = options.has_key?(:file) ? :file : :string + $stderr.puts + $stderr.puts "Deprecation warning: FakeWeb's :#{which} option has been renamed to :body." + $stderr.puts "Just replace :#{which} with :body in your FakeWeb.register_uri calls." + $stderr.puts "Called at #{caller[6]}" + end + + end +end \ No newline at end of file diff --git a/vendor/bundle/ruby/3.2.0/gems/fakeweb-1.3.0/lib/fake_web/response.rb b/vendor/bundle/ruby/3.2.0/gems/fakeweb-1.3.0/lib/fake_web/response.rb new file mode 100644 index 000000000..41ba2557b --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/fakeweb-1.3.0/lib/fake_web/response.rb @@ -0,0 +1,10 @@ +module FakeWeb + module Response #:nodoc: + + def read_body(*args, &block) + yield @body if block_given? 
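+      # There is no socket to stream from; yield the stored body to mimic
+      # Net::HTTP's streaming read_body interface, then return it directly.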
+ @body + end + + end +end \ No newline at end of file diff --git a/vendor/bundle/ruby/3.2.0/gems/fakeweb-1.3.0/lib/fake_web/stub_socket.rb b/vendor/bundle/ruby/3.2.0/gems/fakeweb-1.3.0/lib/fake_web/stub_socket.rb new file mode 100644 index 000000000..008681ca6 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/fakeweb-1.3.0/lib/fake_web/stub_socket.rb @@ -0,0 +1,15 @@ +module FakeWeb + class StubSocket #:nodoc: + + def initialize(*args) + end + + def closed? + @closed ||= true + end + + def readuntil(*args) + end + + end +end \ No newline at end of file diff --git a/vendor/bundle/ruby/3.2.0/gems/fakeweb-1.3.0/lib/fake_web/utility.rb b/vendor/bundle/ruby/3.2.0/gems/fakeweb-1.3.0/lib/fake_web/utility.rb new file mode 100644 index 000000000..bd5d7161c --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/fakeweb-1.3.0/lib/fake_web/utility.rb @@ -0,0 +1,87 @@ +module FakeWeb + module Utility #:nodoc: + + def self.decode_userinfo_from_header(header) + header.sub(/^Basic /, "").unpack("m").first + end + + def self.encode_unsafe_chars_in_userinfo(userinfo) + unsafe_in_userinfo = /[^#{URI::REGEXP::PATTERN::UNRESERVED};&=+$,]|^(#{URI::REGEXP::PATTERN::ESCAPED})/ + userinfo.split(":").map { |part| uri_escape(part, unsafe_in_userinfo) }.join(":") + end + + def self.strip_default_port_from_uri(uri) + case uri + when %r{^http://} then uri.sub(%r{:80(/|$)}, '\1') + when %r{^https://} then uri.sub(%r{:443(/|$)}, '\1') + else uri + end + end + + # Returns a string with a normalized version of a Net::HTTP request's URI. + def self.request_uri_as_string(net_http, request) + protocol = net_http.use_ssl? ? "https" : "http" + + path = request.path + path = URI.parse(request.path).request_uri if request.path =~ /^http/ + + if request["authorization"] =~ /^Basic / + userinfo = FakeWeb::Utility.decode_userinfo_from_header(request["authorization"]) + userinfo = FakeWeb::Utility.encode_unsafe_chars_in_userinfo(userinfo) + "@" + else + userinfo = "" + end + + uri = "#{protocol}://#{userinfo}#{net_http.address}:#{net_http.port}#{path}" + end + + # Wrapper for URI escaping that switches between URI::Parser#escape and + # URI.escape for 1.9-compatibility + def self.uri_escape(*args) + if URI.const_defined?(:Parser) + URI::Parser.new.escape(*args) + else + URI.escape(*args) + end + end + + def self.produce_side_effects_of_net_http_request(request, body) + request.set_body_internal(body) + request.content_length = request.body.length unless request.body.nil? + end + + def self.puts_warning_for_net_http_around_advice_libs_if_needed + libs = {"Samuel" => defined?(Samuel)} + warnings = libs.select { |_, loaded| loaded }.map do |name, _| + <<-TEXT.gsub(/ {10}/, '') + \e[1mWarning: FakeWeb was loaded after #{name}\e[0m + * #{name}'s code is being ignored when a request is handled by FakeWeb, + because both libraries work by patching Net::HTTP. + * To fix this, just reorder your requires so that FakeWeb is before #{name}. + TEXT + end + $stderr.puts "\n" + warnings.join("\n") + "\n" if warnings.any? + end + + def self.record_loaded_net_http_replacement_libs + libs = {"RightHttpConnection" => defined?(RightHttpConnection)} + @loaded_net_http_replacement_libs = libs.map { |name, loaded| name if loaded }.compact + end + + def self.puts_warning_for_net_http_replacement_libs_if_needed + libs = {"RightHttpConnection" => defined?(RightHttpConnection)} + warnings = libs.select { |_, loaded| loaded }. + reject { |name, _| @loaded_net_http_replacement_libs.include?(name) }. 
+ map do |name, _| + <<-TEXT.gsub(/ {10}/, '') + \e[1mWarning: #{name} was loaded after FakeWeb\e[0m + * FakeWeb's code is being ignored, because #{name} replaces parts of + Net::HTTP without deferring to other libraries. This will break Net::HTTP requests. + * To fix this, just reorder your requires so that #{name} is before FakeWeb. + TEXT + end + $stderr.puts "\n" + warnings.join("\n") + "\n" if warnings.any? + end + + end +end diff --git a/vendor/bundle/ruby/3.2.0/gems/fakeweb-1.3.0/lib/fakeweb.rb b/vendor/bundle/ruby/3.2.0/gems/fakeweb-1.3.0/lib/fakeweb.rb new file mode 100644 index 000000000..6982966bf --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/fakeweb-1.3.0/lib/fakeweb.rb @@ -0,0 +1,2 @@ +# So you can require "fakeweb" instead of "fake_web" +require "fake_web" \ No newline at end of file diff --git a/vendor/bundle/ruby/3.2.0/gems/fakeweb-1.3.0/test/fixtures/google_response_from_curl b/vendor/bundle/ruby/3.2.0/gems/fakeweb-1.3.0/test/fixtures/google_response_from_curl new file mode 100644 index 000000000..fe2fe3945 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/fakeweb-1.3.0/test/fixtures/google_response_from_curl @@ -0,0 +1,12 @@ +HTTP/1.1 200 OK +Cache-Control: private, max-age=0 +Date: Sun, 01 Feb 2009 02:16:24 GMT +Expires: -1 +Content-Type: text/html; charset=ISO-8859-1 +Set-Cookie: PREF=ID=a6d9b5f5a4056dfe:TM=1233454584:LM=1233454584:S=U9pSwSu4eQwOPenX; expires=Tue, 01-Feb-2011 02:16:24 GMT; path=/; domain=.google.com +Server: gws +Transfer-Encoding: chunked + +Google

[Google homepage HTML body from the recorded response; markup was stripped in this rendering. Visible text: "Google", "Advanced Search", "Preferences", "Language Tools", "Share what you know. Write a Knol.", "Advertising Programs - Business Solutions - About Google", "©2009 - Privacy".]
\ No newline at end of file diff --git a/vendor/bundle/ruby/3.2.0/gems/fakeweb-1.3.0/test/fixtures/google_response_with_transfer_encoding b/vendor/bundle/ruby/3.2.0/gems/fakeweb-1.3.0/test/fixtures/google_response_with_transfer_encoding new file mode 100644 index 000000000..82025d36e --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/fakeweb-1.3.0/test/fixtures/google_response_with_transfer_encoding @@ -0,0 +1,17 @@ +HTTP/1.1 200 OK +Cache-Control: private, max-age=0 +Date: Sun, 01 Feb 2009 01:54:36 GMT +Expires: -1 +Content-Type: text/html; charset=ISO-8859-1 +Set-Cookie: PREF=ID=4320bcaa30d097de:TM=1233453276:LM=1233453276:S=Eio39bg_nIabTxzL; expires=Tue, 01-Feb-2011 01:54:36 GMT; path=/; domain=.google.com +Server: gws +Transfer-Encoding: chunked + +fef +Google

[Google homepage HTML body from the recorded response; markup was stripped in this rendering. Visible text: "Google", "Advanced Search", "Preferences", "Language Tools", "Share what you know. Write a Knol.", "Advertising Programs - Business Solutions - About Google", "©2009 - Privacy".]
+0 + diff --git a/vendor/bundle/ruby/3.2.0/gems/fakeweb-1.3.0/test/fixtures/google_response_without_transfer_encoding b/vendor/bundle/ruby/3.2.0/gems/fakeweb-1.3.0/test/fixtures/google_response_without_transfer_encoding new file mode 100644 index 000000000..51433c990 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/fakeweb-1.3.0/test/fixtures/google_response_without_transfer_encoding @@ -0,0 +1,11 @@ +HTTP/1.0 200 OK +Cache-Control: private, max-age=0 +Date: Sun, 01 Feb 2009 01:55:33 GMT +Expires: -1 +Content-Type: text/html; charset=ISO-8859-1 +Set-Cookie: PREF=ID=3c140c3eb4c4f516:TM=1233453333:LM=1233453333:S=OH7sElk2hOWkb9ot; expires=Tue, 01-Feb-2011 01:55:33 GMT; path=/; domain=.google.com +Server: gws + +Google

[Google homepage HTML body from the recorded response; markup was stripped in this rendering. Visible text: "Google", "Advanced Search", "Preferences", "Language Tools", "Share what you know. Write a Knol.", "Advertising Programs - Business Solutions - About Google", "©2009 - Privacy".]
\ No newline at end of file diff --git a/vendor/bundle/ruby/3.2.0/gems/fakeweb-1.3.0/test/fixtures/test_example.txt b/vendor/bundle/ruby/3.2.0/gems/fakeweb-1.3.0/test/fixtures/test_example.txt new file mode 100644 index 000000000..6310da9df --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/fakeweb-1.3.0/test/fixtures/test_example.txt @@ -0,0 +1 @@ +test example content \ No newline at end of file diff --git a/vendor/bundle/ruby/3.2.0/gems/fakeweb-1.3.0/test/fixtures/test_txt_file b/vendor/bundle/ruby/3.2.0/gems/fakeweb-1.3.0/test/fixtures/test_txt_file new file mode 100644 index 000000000..8cf2f17fe --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/fakeweb-1.3.0/test/fixtures/test_txt_file @@ -0,0 +1,3 @@ +line 1 +line 2 +line 3 \ No newline at end of file diff --git a/vendor/bundle/ruby/3.2.0/gems/fakeweb-1.3.0/test/test_allow_net_connect.rb b/vendor/bundle/ruby/3.2.0/gems/fakeweb-1.3.0/test/test_allow_net_connect.rb new file mode 100644 index 000000000..25f4d3d64 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/fakeweb-1.3.0/test/test_allow_net_connect.rb @@ -0,0 +1,168 @@ +require 'test_helper' + +class TestFakeWebAllowNetConnect < Test::Unit::TestCase + def test_unregistered_requests_are_passed_through_when_allow_net_connect_is_true + FakeWeb.allow_net_connect = true + setup_expectations_for_real_apple_hot_news_request + Net::HTTP.get(URI.parse("http://images.apple.com/main/rss/hotnews/hotnews.rss")) + end + + def test_raises_for_unregistered_requests_when_allow_net_connect_is_false + FakeWeb.allow_net_connect = false + assert_raise FakeWeb::NetConnectNotAllowedError do + Net::HTTP.get(URI.parse("http://example.com/")) + end + end + + def test_unregistered_requests_are_passed_through_when_allow_net_connect_is_the_same_string + FakeWeb.allow_net_connect = "http://images.apple.com/main/rss/hotnews/hotnews.rss" + setup_expectations_for_real_apple_hot_news_request + Net::HTTP.get(URI.parse("http://images.apple.com/main/rss/hotnews/hotnews.rss")) + end + + def test_unregistered_requests_are_passed_through_when_allow_net_connect_is_the_same_string_with_default_port + FakeWeb.allow_net_connect = "http://images.apple.com:80/main/rss/hotnews/hotnews.rss" + setup_expectations_for_real_apple_hot_news_request + Net::HTTP.get(URI.parse("http://images.apple.com/main/rss/hotnews/hotnews.rss")) + end + + def test_unregistered_requests_are_passed_through_when_allow_net_connect_is_the_same_uri + FakeWeb.allow_net_connect = URI.parse("http://images.apple.com/main/rss/hotnews/hotnews.rss") + setup_expectations_for_real_apple_hot_news_request + Net::HTTP.get(URI.parse("http://images.apple.com/main/rss/hotnews/hotnews.rss")) + end + + def test_unregistered_requests_are_passed_through_when_allow_net_connect_is_a_matching_regexp + FakeWeb.allow_net_connect = %r[^http://images\.apple\.com] + setup_expectations_for_real_apple_hot_news_request + Net::HTTP.get(URI.parse("http://images.apple.com/main/rss/hotnews/hotnews.rss")) + end + + def test_raises_for_unregistered_requests_when_allow_net_connect_is_a_different_string + FakeWeb.allow_net_connect = "http://example.com" + assert_raise FakeWeb::NetConnectNotAllowedError do + Net::HTTP.get(URI.parse("http://example.com/path")) + end + end + + def test_raises_for_unregistered_requests_when_allow_net_connect_is_a_different_uri + FakeWeb.allow_net_connect = URI.parse("http://example.com") + assert_raise FakeWeb::NetConnectNotAllowedError do + Net::HTTP.get(URI.parse("http://example.com/path")) + end + end + + def 
test_raises_for_unregistered_requests_when_allow_net_connect_is_a_non_matching_regexp + FakeWeb.allow_net_connect = %r[example\.net] + assert_raise FakeWeb::NetConnectNotAllowedError do + Net::HTTP.get(URI.parse("http://example.com")) + end + end + + def test_changing_allow_net_connect_from_string_to_false_corretly_removes_whitelist + FakeWeb.allow_net_connect = "http://example.com" + FakeWeb.allow_net_connect = false + assert_raise FakeWeb::NetConnectNotAllowedError do + Net::HTTP.get(URI.parse("http://example.com")) + end + end + + def test_changing_allow_net_connect_from_true_to_string_corretly_limits_connections + FakeWeb.allow_net_connect = true + FakeWeb.allow_net_connect = "http://example.com" + assert_raise FakeWeb::NetConnectNotAllowedError do + Net::HTTP.get(URI.parse("http://example.net")) + end + end + + def test_exception_message_includes_unregistered_request_method_and_uri_but_no_default_port + FakeWeb.allow_net_connect = false + exception = assert_raise FakeWeb::NetConnectNotAllowedError do + Net::HTTP.get(URI.parse("http://example.com/")) + end + assert exception.message.include?("GET http://example.com/") + + exception = assert_raise FakeWeb::NetConnectNotAllowedError do + http = Net::HTTP.new("example.com", 443) + http.use_ssl = true + http.get("/") + end + assert exception.message.include?("GET https://example.com/") + end + + def test_exception_message_includes_unregistered_request_port_when_not_default + FakeWeb.allow_net_connect = false + exception = assert_raise FakeWeb::NetConnectNotAllowedError do + Net::HTTP.start("example.com", 8000) { |http| http.get("/") } + end + assert exception.message.include?("GET http://example.com:8000/") + + exception = assert_raise FakeWeb::NetConnectNotAllowedError do + http = Net::HTTP.new("example.com", 4433) + http.use_ssl = true + http.get("/") + end + assert exception.message.include?("GET https://example.com:4433/") + end + + def test_exception_message_includes_unregistered_request_port_when_not_default_with_path + FakeWeb.allow_net_connect = false + exception = assert_raise FakeWeb::NetConnectNotAllowedError do + Net::HTTP.start("example.com", 8000) { |http| http.get("/test") } + end + assert exception.message.include?("GET http://example.com:8000/test") + + exception = assert_raise FakeWeb::NetConnectNotAllowedError do + http = Net::HTTP.new("example.com", 4433) + http.use_ssl = true + http.get("/test") + end + assert exception.message.include?("GET https://example.com:4433/test") + end + + def test_question_mark_method_returns_true_after_setting_allow_net_connect_to_true + FakeWeb.allow_net_connect = true + assert FakeWeb.allow_net_connect? + end + + def test_question_mark_method_returns_false_after_setting_allow_net_connect_to_false + FakeWeb.allow_net_connect = false + assert !FakeWeb.allow_net_connect? + end + + def test_question_mark_method_raises_with_no_argument_when_allow_net_connect_is_a_whitelist + FakeWeb.allow_net_connect = "http://example.com" + exception = assert_raise ArgumentError do + FakeWeb.allow_net_connect? 
+ end + assert_equal "You must supply a URI to test", exception.message + end + + def test_question_mark_method_returns_true_when_argument_is_same_uri_as_allow_net_connect_string + FakeWeb.allow_net_connect = "http://example.com" + assert FakeWeb.allow_net_connect?("http://example.com/") + end + + def test_question_mark_method_returns_true_when_argument_matches_allow_net_connect_regexp + FakeWeb.allow_net_connect = %r[^https?://example.com/] + assert FakeWeb.allow_net_connect?("http://example.com/path") + assert FakeWeb.allow_net_connect?("https://example.com:443/") + end + + def test_question_mark_method_returns_false_when_argument_does_not_match_allow_net_connect_regexp + FakeWeb.allow_net_connect = %r[^http://example.com/] + assert !FakeWeb.allow_net_connect?("http://example.com:8080") + end +end + + +class TestFakeWebAllowNetConnectWithCleanState < Test::Unit::TestCase + # Our test_helper.rb sets allow_net_connect = false in an inherited #setup + # method. Disable that here to test the default setting. + def setup; end + def teardown; end + + def test_allow_net_connect_is_true_by_default + assert FakeWeb.allow_net_connect? + end +end diff --git a/vendor/bundle/ruby/3.2.0/gems/fakeweb-1.3.0/test/test_deprecations.rb b/vendor/bundle/ruby/3.2.0/gems/fakeweb-1.3.0/test/test_deprecations.rb new file mode 100644 index 000000000..e5b8953c5 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/fakeweb-1.3.0/test/test_deprecations.rb @@ -0,0 +1,54 @@ +require 'test_helper' + +class TestDeprecations < Test::Unit::TestCase + + def test_register_uri_without_method_argument_prints_deprecation_warning + warning = capture_stderr do + FakeWeb.register_uri("http://example.com", :body => "test") + end + assert_match %r(deprecation warning: fakeweb)i, warning + end + + def test_registered_uri_without_method_argument_prints_deprecation_warning + warning = capture_stderr do + FakeWeb.registered_uri?("http://example.com") + end + assert_match %r(deprecation warning: fakeweb)i, warning + end + + def test_response_for_without_method_argument_prints_deprecation_warning + warning = capture_stderr do + FakeWeb.response_for("http://example.com") + end + assert_match %r(deprecation warning: fakeweb)i, warning + end + + def test_register_uri_without_method_argument_prints_deprecation_warning_with_correct_caller + warning = capture_stderr do + FakeWeb.register_uri("http://example.com", :body => "test") + end + assert_match %r(Called at.*?test_deprecations\.rb)i, warning + end + + def test_register_uri_with_string_option_prints_deprecation_warning + warning = capture_stderr do + FakeWeb.register_uri(:get, "http://example.com", :string => "test") + end + assert_match %r(deprecation warning: fakeweb's :string option)i, warning + end + + def test_register_uri_with_file_option_prints_deprecation_warning + warning = capture_stderr do + FakeWeb.register_uri(:get, "http://example.com", :file => fixture_path("test_example.txt")) + end + assert_match %r(deprecation warning: fakeweb's :file option)i, warning + end + + def test_register_uri_with_string_option_prints_deprecation_warning_with_correct_caller + warning = capture_stderr do + FakeWeb.register_uri(:get, "http://example.com", :string => "test") + end + assert_match %r(Called at.*?test_deprecations\.rb)i, warning + end + +end diff --git a/vendor/bundle/ruby/3.2.0/gems/fakeweb-1.3.0/test/test_fake_authentication.rb b/vendor/bundle/ruby/3.2.0/gems/fakeweb-1.3.0/test/test_fake_authentication.rb new file mode 100644 index 000000000..cff276441 --- /dev/null +++ 
b/vendor/bundle/ruby/3.2.0/gems/fakeweb-1.3.0/test/test_fake_authentication.rb @@ -0,0 +1,92 @@ +require 'test_helper' + +class TestFakeAuthentication < Test::Unit::TestCase + + def test_register_uri_with_authentication + FakeWeb.register_uri(:get, 'http://user:pass@mock/test_example.txt', :body => "example") + assert FakeWeb.registered_uri?(:get, 'http://user:pass@mock/test_example.txt') + end + + def test_register_uri_with_authentication_doesnt_trigger_without + FakeWeb.register_uri(:get, 'http://user:pass@mock/test_example.txt', :body => "example") + assert !FakeWeb.registered_uri?(:get, 'http://mock/test_example.txt') + end + + def test_register_uri_with_authentication_doesnt_trigger_with_incorrect_credentials + FakeWeb.register_uri(:get, 'http://user:pass@mock/test_example.txt', :body => "example") + assert !FakeWeb.registered_uri?(:get, 'http://user:wrong@mock/test_example.txt') + end + + def test_unauthenticated_request + FakeWeb.register_uri(:get, 'http://mock/auth.txt', :body => 'unauthorized') + http = Net::HTTP.new('mock', 80) + req = Net::HTTP::Get.new('/auth.txt') + assert_equal 'unauthorized', http.request(req).body + end + + def test_authenticated_request + FakeWeb.register_uri(:get, 'http://user:pass@mock/auth.txt', :body => 'authorized') + http = Net::HTTP.new('mock',80) + req = Net::HTTP::Get.new('/auth.txt') + req.basic_auth 'user', 'pass' + assert_equal 'authorized', http.request(req).body + end + + def test_authenticated_request_where_only_userinfo_differs + FakeWeb.register_uri(:get, 'http://user:pass@mock/auth.txt', :body => 'first user') + FakeWeb.register_uri(:get, 'http://user2:pass@mock/auth.txt', :body => 'second user') + http = Net::HTTP.new('mock') + req = Net::HTTP::Get.new('/auth.txt') + req.basic_auth 'user2', 'pass' + assert_equal 'second user', http.request(req).body + end + + def test_basic_auth_support_is_transparent_to_oauth + FakeWeb.register_uri(:get, "http://sp.example.com/protected", :body => "secret") + + # from http://oauth.net/core/1.0/#auth_header + auth_header = <<-HEADER + OAuth realm="http://sp.example.com/", + oauth_consumer_key="0685bd9184jfhq22", + oauth_token="ad180jjd733klru7", + oauth_signature_method="HMAC-SHA1", + oauth_signature="wOJIO9A2W5mFwDgiDvZbTSMK%2FPY%3D", + oauth_timestamp="137131200", + oauth_nonce="4572616e48616d6d65724c61686176", + oauth_version="1.0" + HEADER + auth_header.gsub!(/\s+/, " ").strip! 
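+    # Collapse the heredoc into a single-line header value; FakeWeb's URI matching
+    # only inspects "Basic" credentials, so this OAuth header should pass through
+    # untouched and still hit the registered stub.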
+ + http = Net::HTTP.new("sp.example.com", 80) + response = nil + http.start do |request| + response = request.get("/protected", {"authorization" => auth_header}) + end + assert_equal "secret", response.body + end + + def test_basic_auth_when_userinfo_contains_allowed_unencoded_characters + FakeWeb.register_uri(:get, "http://roses&hel1o,(+$):so;longs=@example.com", :body => "authorized") + http = Net::HTTP.new("example.com") + request = Net::HTTP::Get.new("/") + request.basic_auth("roses&hel1o,(+$)", "so;longs=") + assert_equal "authorized", http.request(request).body + end + + def test_basic_auth_when_userinfo_contains_encoded_at_sign + FakeWeb.register_uri(:get, "http://user%40example.com:secret@example.com", :body => "authorized") + http = Net::HTTP.new("example.com") + request = Net::HTTP::Get.new("/") + request.basic_auth("user@example.com", "secret") + assert_equal "authorized", http.request(request).body + end + + def test_basic_auth_when_userinfo_contains_allowed_encoded_characters + FakeWeb.register_uri(:get, "http://us%20er:sec%20%2F%2Fret%3F@example.com", :body => "authorized") + http = Net::HTTP.new("example.com") + request = Net::HTTP::Get.new("/") + request.basic_auth("us er", "sec //ret?") + assert_equal "authorized", http.request(request).body + end + +end diff --git a/vendor/bundle/ruby/3.2.0/gems/fakeweb-1.3.0/test/test_fake_web.rb b/vendor/bundle/ruby/3.2.0/gems/fakeweb-1.3.0/test/test_fake_web.rb new file mode 100644 index 000000000..c6e6b59eb --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/fakeweb-1.3.0/test/test_fake_web.rb @@ -0,0 +1,590 @@ +require 'test_helper' + +class TestFakeWeb < Test::Unit::TestCase + + def test_register_uri + FakeWeb.register_uri(:get, 'http://mock/test_example.txt', :body => "example") + assert FakeWeb.registered_uri?(:get, 'http://mock/test_example.txt') + end + + def test_register_uri_with_wrong_number_of_arguments + assert_raises ArgumentError do + FakeWeb.register_uri("http://example.com") + end + assert_raises ArgumentError do + FakeWeb.register_uri(:get, "http://example.com", "/example", :body => "example") + end + end + + def test_registered_uri_with_wrong_number_of_arguments + assert_raises ArgumentError do + FakeWeb.registered_uri? 
+ end + assert_raises ArgumentError do + FakeWeb.registered_uri?(:get, "http://example.com", "/example") + end + end + + def test_response_for_with_wrong_number_of_arguments + assert_raises ArgumentError do + FakeWeb.response_for + end + assert_raises ArgumentError do + FakeWeb.response_for(:get, "http://example.com", "/example") + end + end + + def test_register_uri_without_domain_name + assert_raises URI::InvalidURIError do + FakeWeb.register_uri(:get, 'test_example2.txt', fixture_path("test_example.txt")) + end + end + + def test_register_uri_with_port_and_check_with_port + FakeWeb.register_uri(:get, 'http://example.com:3000/', :body => 'foo') + assert FakeWeb.registered_uri?(:get, 'http://example.com:3000/') + end + + def test_register_uri_with_port_and_check_without_port + FakeWeb.register_uri(:get, 'http://example.com:3000/', :body => 'foo') + assert !FakeWeb.registered_uri?(:get, 'http://example.com/') + end + + def test_register_uri_with_default_port_for_http_and_check_without_port + FakeWeb.register_uri(:get, 'http://example.com:80/', :body => 'foo') + assert FakeWeb.registered_uri?(:get, 'http://example.com/') + end + + def test_register_uri_with_default_port_for_https_and_check_without_port + FakeWeb.register_uri(:get, 'https://example.com:443/', :body => 'foo') + assert FakeWeb.registered_uri?(:get, 'https://example.com/') + end + + def test_register_uri_with_no_port_for_http_and_check_with_default_port + FakeWeb.register_uri(:get, 'http://example.com/', :body => 'foo') + assert FakeWeb.registered_uri?(:get, 'http://example.com:80/') + end + + def test_register_uri_with_no_port_for_https_and_check_with_default_port + FakeWeb.register_uri(:get, 'https://example.com/', :body => 'foo') + assert FakeWeb.registered_uri?(:get, 'https://example.com:443/') + end + + def test_register_uri_with_no_port_for_https_and_check_with_443_on_http + FakeWeb.register_uri(:get, 'https://example.com/', :body => 'foo') + assert !FakeWeb.registered_uri?(:get, 'http://example.com:443/') + end + + def test_register_uri_with_no_port_for_http_and_check_with_80_on_https + FakeWeb.register_uri(:get, 'http://example.com/', :body => 'foo') + assert !FakeWeb.registered_uri?(:get, 'https://example.com:80/') + end + + def test_register_uri_for_any_method_explicitly + FakeWeb.register_uri(:any, "http://example.com/rpc_endpoint", :body => "OK") + assert FakeWeb.registered_uri?(:get, "http://example.com/rpc_endpoint") + assert FakeWeb.registered_uri?(:post, "http://example.com/rpc_endpoint") + assert FakeWeb.registered_uri?(:put, "http://example.com/rpc_endpoint") + assert FakeWeb.registered_uri?(:delete, "http://example.com/rpc_endpoint") + assert FakeWeb.registered_uri?(:any, "http://example.com/rpc_endpoint") + capture_stderr do # silence deprecation warning + assert FakeWeb.registered_uri?("http://example.com/rpc_endpoint") + end + end + + def test_register_uri_for_get_method_only + FakeWeb.register_uri(:get, "http://example.com/users", :body => "User list") + assert FakeWeb.registered_uri?(:get, "http://example.com/users") + assert !FakeWeb.registered_uri?(:post, "http://example.com/users") + assert !FakeWeb.registered_uri?(:put, "http://example.com/users") + assert !FakeWeb.registered_uri?(:delete, "http://example.com/users") + assert !FakeWeb.registered_uri?(:any, "http://example.com/users") + capture_stderr do # silence deprecation warning + assert !FakeWeb.registered_uri?("http://example.com/users") + end + end + + def test_clean_registry_affects_registered_uri + FakeWeb.register_uri(:get, 
"http://example.com", :body => "registered") + assert FakeWeb.registered_uri?(:get, "http://example.com") + FakeWeb.clean_registry + assert !FakeWeb.registered_uri?(:get, "http://example.com") + end + + def test_clean_registry_affects_net_http_requests + FakeWeb.register_uri(:get, "http://example.com", :body => "registered") + response = Net::HTTP.start("example.com") { |query| query.get("/") } + assert_equal "registered", response.body + FakeWeb.clean_registry + assert_raise FakeWeb::NetConnectNotAllowedError do + Net::HTTP.start("example.com") { |query| query.get("/") } + end + end + + def test_response_for_with_registered_uri + FakeWeb.register_uri(:get, 'http://mock/test_example.txt', :body => fixture_path("test_example.txt")) + assert_equal 'test example content', FakeWeb.response_for(:get, 'http://mock/test_example.txt').body + end + + def test_response_for_with_unknown_uri + assert_nil FakeWeb.response_for(:get, 'http://example.com/') + end + + def test_response_for_with_put_method + FakeWeb.register_uri(:put, "http://example.com", :body => "response") + assert_equal 'response', FakeWeb.response_for(:put, "http://example.com").body + end + + def test_response_for_with_any_method_explicitly + FakeWeb.register_uri(:any, "http://example.com", :body => "response") + assert_equal 'response', FakeWeb.response_for(:get, "http://example.com").body + assert_equal 'response', FakeWeb.response_for(:any, "http://example.com").body + end + + def test_content_for_registered_uri_with_port_and_request_with_port + FakeWeb.register_uri(:get, 'http://example.com:3000/', :body => 'test example content') + response = Net::HTTP.start('example.com', 3000) { |http| http.get('/') } + assert_equal 'test example content', response.body + end + + def test_content_for_registered_uri_with_default_port_for_http_and_request_without_port + FakeWeb.register_uri(:get, 'http://example.com:80/', :body => 'test example content') + response = Net::HTTP.start('example.com') { |http| http.get('/') } + assert_equal 'test example content', response.body + end + + def test_content_for_registered_uri_with_no_port_for_http_and_request_with_default_port + FakeWeb.register_uri(:get, 'http://example.com/', :body => 'test example content') + response = Net::HTTP.start('example.com', 80) { |http| http.get('/') } + assert_equal 'test example content', response.body + end + + def test_content_for_registered_uri_with_default_port_for_https_and_request_with_default_port + FakeWeb.register_uri(:get, 'https://example.com:443/', :body => 'test example content') + http = Net::HTTP.new('example.com', 443) + http.use_ssl = true + response = http.get('/') + assert_equal 'test example content', response.body + end + + def test_content_for_registered_uri_with_no_port_for_https_and_request_with_default_port + FakeWeb.register_uri(:get, 'https://example.com/', :body => 'test example content') + http = Net::HTTP.new('example.com', 443) + http.use_ssl = true + response = http.get('/') + assert_equal 'test example content', response.body + end + + def test_content_for_registered_uris_with_ports_on_same_domain_and_request_without_port + FakeWeb.register_uri(:get, 'http://example.com:3000/', :body => 'port 3000') + FakeWeb.register_uri(:get, 'http://example.com/', :body => 'port 80') + response = Net::HTTP.start('example.com') { |http| http.get('/') } + assert_equal 'port 80', response.body + end + + def test_content_for_registered_uris_with_ports_on_same_domain_and_request_with_port + FakeWeb.register_uri(:get, 'http://example.com:3000/', :body => 
'port 3000') + FakeWeb.register_uri(:get, 'http://example.com/', :body => 'port 80') + response = Net::HTTP.start('example.com', 3000) { |http| http.get('/') } + assert_equal 'port 3000', response.body + end + + def test_content_for_registered_uri_with_get_method_only + FakeWeb.allow_net_connect = false + FakeWeb.register_uri(:get, "http://example.com/", :body => "test example content") + http = Net::HTTP.new('example.com') + assert_equal 'test example content', http.get('/').body + assert_raises(FakeWeb::NetConnectNotAllowedError) { http.post('/', nil) } + assert_raises(FakeWeb::NetConnectNotAllowedError) { http.put('/', nil) } + assert_raises(FakeWeb::NetConnectNotAllowedError) { http.delete('/') } + end + + def test_content_for_registered_uri_with_any_method_explicitly + FakeWeb.allow_net_connect = false + FakeWeb.register_uri(:any, "http://example.com/", :body => "test example content") + http = Net::HTTP.new('example.com') + assert_equal 'test example content', http.get('/').body + assert_equal 'test example content', http.post('/', nil).body + assert_equal 'test example content', http.put('/', nil).body + assert_equal 'test example content', http.delete('/').body + end + + def test_content_for_registered_uri_with_any_method_implicitly + FakeWeb.allow_net_connect = false + capture_stderr do # silence deprecation warning + FakeWeb.register_uri("http://example.com/", :body => "test example content") + end + + http = Net::HTTP.new('example.com') + assert_equal 'test example content', http.get('/').body + assert_equal 'test example content', http.post('/', nil).body + assert_equal 'test example content', http.put('/', nil).body + assert_equal 'test example content', http.delete('/').body + end + + def test_mock_request_with_block + FakeWeb.register_uri(:get, 'http://mock/test_example.txt', :body => fixture_path("test_example.txt")) + response = Net::HTTP.start('mock') { |http| http.get('/test_example.txt') } + assert_equal 'test example content', response.body + end + + def test_request_with_registered_body_yields_the_response_body_to_a_request_block + FakeWeb.register_uri(:get, "http://example.com", :body => "content") + body = nil + Net::HTTP.start("example.com") do |http| + http.get("/") do |response_body| + body = response_body + end + end + assert_equal "content", body + end + + def test_request_with_registered_response_yields_the_response_body_to_a_request_block + fake_response = Net::HTTPOK.new('1.1', '200', 'OK') + fake_response.instance_variable_set(:@body, "content") + FakeWeb.register_uri(:get, 'http://example.com', :response => fake_response) + body = nil + Net::HTTP.start("example.com") do |http| + http.get("/") do |response_body| + body = response_body + end + end + assert_equal "content", body + end + + def test_mock_request_with_undocumented_full_uri_argument_style + FakeWeb.register_uri(:get, 'http://mock/test_example.txt', :body => fixture_path("test_example.txt")) + response = Net::HTTP.start('mock') { |query| query.get('http://mock/test_example.txt') } + assert_equal 'test example content', response.body + end + + def test_mock_request_with_undocumented_full_uri_argument_style_and_query + FakeWeb.register_uri(:get, 'http://mock/test_example.txt?a=b', :body => 'test query content') + response = Net::HTTP.start('mock') { |query| query.get('http://mock/test_example.txt?a=b') } + assert_equal 'test query content', response.body + end + + def test_mock_post + FakeWeb.register_uri(:post, 'http://mock/test_example.txt', :body => fixture_path("test_example.txt")) + response = 
Net::HTTP.start('mock') { |query| query.post('/test_example.txt', '') } + assert_equal 'test example content', response.body + end + + def test_mock_post_with_string_as_registered_uri + FakeWeb.register_uri(:post, 'http://mock/test_string.txt', :body => 'foo') + response = Net::HTTP.start('mock') { |query| query.post('/test_string.txt', '') } + assert_equal 'foo', response.body + end + + def test_mock_post_with_body_sets_the_request_body + FakeWeb.register_uri(:post, "http://example.com/posts", :status => [201, "Created"]) + http = Net::HTTP.new("example.com") + request = Net::HTTP::Post.new("/posts") + http.request(request, "title=Test") + assert_equal "title=Test", request.body + assert_equal 10, request.content_length + end + + def test_mock_post_with_body_using_other_syntax_sets_the_request_body + FakeWeb.register_uri(:post, "http://example.com/posts", :status => [201, "Created"]) + http = Net::HTTP.new("example.com") + request = Net::HTTP::Post.new("/posts") + request.body = "title=Test" + http.request(request) + assert_equal "title=Test", request.body + assert_equal 10, request.content_length + end + + def test_real_post_with_body_sets_the_request_body + FakeWeb.allow_net_connect = true + setup_expectations_for_real_apple_hot_news_request :method => "POST", + :path => "/posts", :request_body => "title=Test" + http = Net::HTTP.new("images.apple.com") + request = Net::HTTP::Post.new("/posts") + request["Content-Type"] = "application/x-www-form-urlencoded" + http.request(request, "title=Test") + assert_equal "title=Test", request.body + assert_equal 10, request.content_length + end + + def test_mock_get_with_request_as_registered_uri + fake_response = Net::HTTPOK.new('1.1', '200', 'OK') + FakeWeb.register_uri(:get, 'http://mock/test_response', :response => fake_response) + response = Net::HTTP.start('mock') { |query| query.get('/test_response') } + assert_equal fake_response, response + end + + def test_mock_get_with_request_from_file_as_registered_uri + FakeWeb.register_uri(:get, 'http://www.google.com/', :response => fixture_path("google_response_without_transfer_encoding")) + response = Net::HTTP.start('www.google.com') { |query| query.get('/') } + assert_equal '200', response.code + assert response.body.include?('Google') + end + + def test_mock_post_with_request_from_file_as_registered_uri + FakeWeb.register_uri(:post, 'http://www.google.com/', :response => fixture_path("google_response_without_transfer_encoding")) + response = Net::HTTP.start('www.google.com') { |query| query.post('/', '') } + assert_equal "200", response.code + assert response.body.include?('Google') + end + + def test_proxy_request + FakeWeb.register_uri(:get, 'http://www.example.com/', :body => "hello world") + FakeWeb.register_uri(:get, 'http://your.proxy.host/', :body => "lala") + + response = nil + Net::HTTP::Proxy('your.proxy.host', 8080).start('www.example.com') do |http| + response = http.get('/') + end + assert_equal "hello world", response.body + end + + def test_https_request + FakeWeb.register_uri(:get, 'https://www.example.com/', :body => "Hello World") + http = Net::HTTP.new('www.example.com', 443) + http.use_ssl = true + response = http.get('/') + assert_equal "Hello World", response.body + end + + def test_register_unimplemented_response + FakeWeb.register_uri(:get, 'http://mock/unimplemented', :response => 1) + assert_raises StandardError do + Net::HTTP.start('mock') { |q| q.get('/unimplemented') } + end + end + + def test_specifying_nil_for_body + FakeWeb.register_uri(:head, 
"http://example.com", :body => nil) + response = Net::HTTP.start("example.com") { |query| query.head("/") } + assert_equal "", response.body + end + + def test_real_http_request + FakeWeb.allow_net_connect = true + setup_expectations_for_real_apple_hot_news_request + + resp = nil + Net::HTTP.start('images.apple.com') do |query| + resp = query.get('/main/rss/hotnews/hotnews.rss') + end + assert resp.body.include?('Apple') + assert resp.body.include?('News') + end + + def test_real_http_request_with_undocumented_full_uri_argument_style + FakeWeb.allow_net_connect = true + setup_expectations_for_real_apple_hot_news_request(:path => 'http://images.apple.com/main/rss/hotnews/hotnews.rss') + + resp = nil + Net::HTTP.start('images.apple.com') do |query| + resp = query.get('http://images.apple.com/main/rss/hotnews/hotnews.rss') + end + assert resp.body.include?('Apple') + assert resp.body.include?('News') + end + + def test_real_https_request + FakeWeb.allow_net_connect = true + setup_expectations_for_real_apple_hot_news_request(:port => 443) + + http = Net::HTTP.new('images.apple.com', 443) + http.use_ssl = true + http.verify_mode = OpenSSL::SSL::VERIFY_NONE # silence certificate warning + response = http.get('/main/rss/hotnews/hotnews.rss') + assert response.body.include?('Apple') + assert response.body.include?('News') + end + + def test_real_request_on_same_domain_as_mock + FakeWeb.allow_net_connect = true + setup_expectations_for_real_apple_hot_news_request + + FakeWeb.register_uri(:get, 'http://images.apple.com/test_string.txt', :body => 'foo') + + resp = nil + Net::HTTP.start('images.apple.com') do |query| + resp = query.get('/main/rss/hotnews/hotnews.rss') + end + assert resp.body.include?('Apple') + assert resp.body.include?('News') + end + + def test_mock_request_on_real_domain + FakeWeb.register_uri(:get, 'http://images.apple.com/test_string.txt', :body => 'foo') + resp = nil + Net::HTTP.start('images.apple.com') do |query| + resp = query.get('/test_string.txt') + end + assert_equal 'foo', resp.body + end + + def test_mock_post_that_raises_exception + FakeWeb.register_uri(:post, 'http://mock/raising_exception.txt', :exception => StandardError) + assert_raises(StandardError) do + Net::HTTP.start('mock') do |query| + query.post('/raising_exception.txt', 'some data') + end + end + end + + def test_mock_post_that_raises_an_http_error + FakeWeb.register_uri(:post, 'http://mock/raising_exception.txt', :exception => Net::HTTPError) + assert_raises(Net::HTTPError) do + Net::HTTP.start('mock') do |query| + query.post('/raising_exception.txt', '') + end + end + end + + def test_raising_an_exception_that_requires_an_argument_to_instantiate + FakeWeb.register_uri(:get, "http://example.com/timeout.txt", :exception => Timeout::Error) + assert_raises(Timeout::Error) do + Net::HTTP.get(URI.parse("http://example.com/timeout.txt")) + end + end + + def test_mock_instance_syntax + FakeWeb.register_uri(:get, 'http://mock/test_example.txt', :body => fixture_path("test_example.txt")) + response = nil + uri = URI.parse('http://mock/test_example.txt') + http = Net::HTTP.new(uri.host, uri.port) + response = http.start do + http.get(uri.path) + end + + assert_equal 'test example content', response.body + end + + def test_mock_via_nil_proxy + response = nil + proxy_address = nil + proxy_port = nil + FakeWeb.register_uri(:get, 'http://mock/test_example.txt', :body => fixture_path("test_example.txt")) + uri = URI.parse('http://mock/test_example.txt') + http = Net::HTTP::Proxy(proxy_address, proxy_port).new( + 
uri.host, (uri.port or 80)) + response = http.start do + http.get(uri.path) + end + + assert_equal 'test example content', response.body + end + + def test_response_type + FakeWeb.register_uri(:get, 'http://mock/test_example.txt', :body => "test") + response = Net::HTTP.start('mock') { |http| http.get('/test_example.txt') } + assert_kind_of Net::HTTPSuccess, response + end + + def test_mock_request_that_raises_an_http_error_with_a_specific_status + FakeWeb.register_uri(:get, 'http://mock/raising_exception.txt', :exception => Net::HTTPError, :status => ['404', 'Not Found']) + exception = assert_raises(Net::HTTPError) do + Net::HTTP.start('mock') { |http| http.get('/raising_exception.txt') } + end + assert_equal '404', exception.response.code + assert_equal 'Not Found', exception.response.msg + end + + def test_mock_rotate_responses + FakeWeb.register_uri(:get, 'http://mock/multiple_test_example.txt', + [ {:body => fixture_path("test_example.txt"), :times => 2}, + {:body => "thrice", :times => 3}, + {:body => "ever_more"} ]) + + uri = URI.parse('http://mock/multiple_test_example.txt') + 2.times { assert_equal 'test example content', Net::HTTP.get(uri) } + 3.times { assert_equal 'thrice', Net::HTTP.get(uri) } + 4.times { assert_equal 'ever_more', Net::HTTP.get(uri) } + end + + def test_mock_request_using_response_with_transfer_encoding_header_has_valid_transfer_encoding_header + FakeWeb.register_uri(:get, 'http://www.google.com/', :response => fixture_path("google_response_with_transfer_encoding")) + response = Net::HTTP.start('www.google.com') { |query| query.get('/') } + assert_not_nil response['transfer-encoding'] + assert response['transfer-encoding'] == 'chunked' + end + + def test_mock_request_using_response_without_transfer_encoding_header_does_not_have_a_transfer_encoding_header + FakeWeb.register_uri(:get, 'http://www.google.com/', :response => fixture_path("google_response_without_transfer_encoding")) + response = nil + response = Net::HTTP.start('www.google.com') { |query| query.get('/') } + assert !response.key?('transfer-encoding') + end + + def test_mock_request_using_response_from_curl_has_original_transfer_encoding_header + FakeWeb.register_uri(:get, 'http://www.google.com/', :response => fixture_path("google_response_from_curl")) + response = Net::HTTP.start('www.google.com') { |query| query.get('/') } + assert_not_nil response['transfer-encoding'] + assert response['transfer-encoding'] == 'chunked' + end + + def test_txt_file_should_have_three_lines + FakeWeb.register_uri(:get, 'http://www.google.com/', :body => fixture_path("test_txt_file")) + response = Net::HTTP.start('www.google.com') { |query| query.get('/') } + assert response.body.split(/\n/).size == 3, "response has #{response.body.split(/\n/).size} lines should have 3" + end + + def test_requiring_fakeweb_instead_of_fake_web + require "fakeweb" + end + + def test_registering_with_string_containing_null_byte + # Regression test for File.exists? raising an ArgumentError ("string + # contains null byte") since :response first tries to find by filename. + # The string should be treated as a response body, instead, and an + # EOFError is raised when the byte is encountered. 
+ FakeWeb.register_uri(:get, "http://example.com", :response => "test\0test") + assert_raise EOFError do + Net::HTTP.get(URI.parse("http://example.com")) + end + + FakeWeb.register_uri(:get, "http://example.com", :body => "test\0test") + body = Net::HTTP.get(URI.parse("http://example.com")) + assert_equal "test\0test", body + end + + def test_registering_with_string_that_is_a_directory_name + # Similar to above, but for Errno::EISDIR being raised since File.exists? + # returns true for directories + FakeWeb.register_uri(:get, "http://example.com", :response => File.dirname(__FILE__)) + assert_raise EOFError do + body = Net::HTTP.get(URI.parse("http://example.com")) + end + + FakeWeb.register_uri(:get, "http://example.com", :body => File.dirname(__FILE__)) + body = Net::HTTP.get(URI.parse("http://example.com")) + assert_equal File.dirname(__FILE__), body + end + + def test_registering_with_a_body_pointing_to_a_pathname + path = Pathname.new(fixture_path("test_example.txt")) + FakeWeb.register_uri(:get, "http://example.com", :body => path) + response = Net::HTTP.start("example.com") { |http| http.get("/") } + assert_equal "test example content", response.body + end + + def test_registering_with_a_response_pointing_to_a_pathname + path = Pathname.new(fixture_path("google_response_without_transfer_encoding")) + FakeWeb.register_uri(:get, "http://google.com", :response => path) + response = Net::HTTP.start("google.com") { |http| http.get("/") } + assert response.body.include?("Google") + end + + def test_http_version_from_string_response + FakeWeb.register_uri(:get, "http://example.com", :body => "example") + response = Net::HTTP.start("example.com") { |http| http.get("/") } + assert_equal "1.0", response.http_version + end + + def test_http_version_from_file_response + FakeWeb.register_uri(:get, "http://example.com", :body => fixture_path("test_example.txt")) + response = Net::HTTP.start("example.com") { |http| http.get("/") } + assert_equal "1.0", response.http_version + end + + def test_version + assert_equal "1.3.0", FakeWeb::VERSION + end + +end diff --git a/vendor/bundle/ruby/3.2.0/gems/fakeweb-1.3.0/test/test_fake_web_open_uri.rb b/vendor/bundle/ruby/3.2.0/gems/fakeweb-1.3.0/test/test_fake_web_open_uri.rb new file mode 100644 index 000000000..699a64762 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/fakeweb-1.3.0/test/test_fake_web_open_uri.rb @@ -0,0 +1,58 @@ +require 'test_helper' + +class TestFakeWebOpenURI < Test::Unit::TestCase + + def test_content_for_registered_uri + FakeWeb.register_uri(:get, 'http://mock/test_example.txt', :body => fixture_path("test_example.txt")) + assert_equal 'test example content', FakeWeb.response_for(:get, 'http://mock/test_example.txt').body + end + + def test_mock_open + FakeWeb.register_uri(:get, 'http://mock/test_example.txt', :body => fixture_path("test_example.txt")) + assert_equal 'test example content', open('http://mock/test_example.txt').read + end + + def test_mock_open_with_string_as_registered_uri + FakeWeb.register_uri(:get, 'http://mock/test_string.txt', :body => 'foo') + assert_equal 'foo', open('http://mock/test_string.txt').string + end + + def test_real_open + FakeWeb.allow_net_connect = true + setup_expectations_for_real_apple_hot_news_request + resp = open('http://images.apple.com/main/rss/hotnews/hotnews.rss') + assert_equal "200", resp.status.first + body = resp.read + assert body.include?('Apple') + assert body.include?('News') + end + + def test_mock_open_that_raises_exception + FakeWeb.register_uri(:get, 
'http://mock/raising_exception.txt', :exception => StandardError) + assert_raises(StandardError) do + open('http://mock/raising_exception.txt') + end + end + + def test_mock_open_that_raises_an_http_error + FakeWeb.register_uri(:get, 'http://mock/raising_exception.txt', :exception => OpenURI::HTTPError) + assert_raises(OpenURI::HTTPError) do + open('http://mock/raising_exception.txt') + end + end + + def test_mock_open_that_raises_an_http_error_with_a_specific_status + FakeWeb.register_uri(:get, 'http://mock/raising_exception.txt', :exception => OpenURI::HTTPError, :status => ['123', 'jodel']) + exception = assert_raises(OpenURI::HTTPError) do + open('http://mock/raising_exception.txt') + end + assert_equal '123', exception.io.code + assert_equal 'jodel', exception.io.message + end + + def test_mock_open_with_block + FakeWeb.register_uri(:get, 'http://mock/test_example.txt', :body => fixture_path("test_example.txt")) + body = open('http://mock/test_example.txt') { |f| f.readlines } + assert_equal 'test example content', body.first + end +end diff --git a/vendor/bundle/ruby/3.2.0/gems/fakeweb-1.3.0/test/test_helper.rb b/vendor/bundle/ruby/3.2.0/gems/fakeweb-1.3.0/test/test_helper.rb new file mode 100644 index 000000000..b181391b1 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/fakeweb-1.3.0/test/test_helper.rb @@ -0,0 +1,90 @@ +require 'test/unit' +require 'open-uri' +require 'pathname' +require 'fake_web' +require 'rbconfig' +require 'rubygems' +require 'mocha' + + +# Give all tests a common setup and teardown that prevents shared state +class Test::Unit::TestCase + alias setup_without_fakeweb setup + def setup + FakeWeb.clean_registry + @original_allow_net_connect = FakeWeb.allow_net_connect? + FakeWeb.allow_net_connect = false + end + + alias teardown_without_fakeweb teardown + def teardown + FakeWeb.allow_net_connect = @original_allow_net_connect + end +end + + +module FakeWebTestHelper + + def fixture_path(basename) + "test/fixtures/#{basename}" + end + + def capture_stderr + $stderr = StringIO.new + yield + $stderr.rewind && $stderr.read + ensure + $stderr = STDERR + end + + # The path to the current ruby interpreter. Adapted from Rake's FileUtils. + def ruby_path + ext = ((RbConfig::CONFIG['ruby_install_name'] =~ /\.(com|cmd|exe|bat|rb|sh)$/) ? "" : RbConfig::CONFIG['EXEEXT']) + File.join(RbConfig::CONFIG['bindir'], RbConfig::CONFIG['ruby_install_name'] + ext).sub(/.*\s.*/m, '"\&"') + end + + # Sets several expectations (using Mocha) that a real HTTP request makes it + # past FakeWeb to the socket layer. You can use this when you need to check + # that a request isn't handled by FakeWeb. 
+ def setup_expectations_for_real_request(options = {}) + # Socket handling + if options[:port] == 443 + socket = mock("SSLSocket") + OpenSSL::SSL::SSLSocket.expects(:===).with(socket).returns(true).at_least_once + OpenSSL::SSL::SSLSocket.expects(:new).with(socket, instance_of(OpenSSL::SSL::SSLContext)).returns(socket).at_least_once + socket.stubs(:sync_close=).returns(true) + socket.expects(:connect).with().at_least_once + else + socket = mock("TCPSocket") + Socket.expects(:===).with(socket).at_least_once.returns(true) + end + + TCPSocket.expects(:open).with(options[:host], options[:port]).returns(socket).at_least_once + socket.stubs(:closed?).returns(false) + socket.stubs(:close).returns(true) + + # Request/response handling + request_parts = ["#{options[:method]} #{options[:path]} HTTP/1.1", "Host: #{options[:host]}"] + socket.expects(:write).with(all_of(includes(request_parts[0]), includes(request_parts[1]))).returns(100) + if !options[:request_body].nil? + socket.expects(:write).with(options[:request_body]).returns(100) + end + + read_method = RUBY_VERSION >= "1.9.2" ? :read_nonblock : :sysread + socket.expects(read_method).at_least_once.returns("HTTP/1.1 #{options[:response_code]} #{options[:response_message]}\nContent-Length: #{options[:response_body].length}\n\n#{options[:response_body]}").then.raises(EOFError) + end + + + # A helper that calls #setup_expectations_for_real_request for you, using + # defaults for our commonly used test request to images.apple.com. + def setup_expectations_for_real_apple_hot_news_request(options = {}) + defaults = { :host => "images.apple.com", :port => 80, :method => "GET", + :path => "/main/rss/hotnews/hotnews.rss", + :response_code => 200, :response_message => "OK", + :response_body => "Apple Hot News" } + setup_expectations_for_real_request(defaults.merge(options)) + end + +end + +Test::Unit::TestCase.send(:include, FakeWebTestHelper) diff --git a/vendor/bundle/ruby/3.2.0/gems/fakeweb-1.3.0/test/test_last_request.rb b/vendor/bundle/ruby/3.2.0/gems/fakeweb-1.3.0/test/test_last_request.rb new file mode 100644 index 000000000..7868c83a1 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/fakeweb-1.3.0/test/test_last_request.rb @@ -0,0 +1,29 @@ +require 'test_helper' + +class TestLastRequest < Test::Unit::TestCase + + def test_last_request_returns_correct_net_http_request_class + FakeWeb.register_uri(:get, "http://example.com", :status => [200, "OK"]) + Net::HTTP.start("example.com") { |http| http.get("/") } + assert_instance_of Net::HTTP::Get, FakeWeb.last_request + end + + def test_last_request_has_correct_method_path_and_body_for_get + FakeWeb.register_uri(:get, "http://example.com", :status => [200, "OK"]) + Net::HTTP.start("example.com") { |http| http.get("/") } + assert_equal "GET", FakeWeb.last_request.method + assert_equal "/", FakeWeb.last_request.path + assert_nil FakeWeb.last_request.body + assert_nil FakeWeb.last_request.content_length + end + + def test_last_request_has_correct_method_path_and_body_for_post + FakeWeb.register_uri(:post, "http://example.com/posts", :status => [201, "Created"]) + Net::HTTP.start("example.com") { |http| http.post("/posts", "title=Test") } + assert_equal "POST", FakeWeb.last_request.method + assert_equal "/posts", FakeWeb.last_request.path + assert_equal "title=Test", FakeWeb.last_request.body + assert_equal 10, FakeWeb.last_request.content_length + end + +end diff --git a/vendor/bundle/ruby/3.2.0/gems/fakeweb-1.3.0/test/test_missing_open_uri.rb 
b/vendor/bundle/ruby/3.2.0/gems/fakeweb-1.3.0/test/test_missing_open_uri.rb new file mode 100644 index 000000000..029ba1cab --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/fakeweb-1.3.0/test/test_missing_open_uri.rb @@ -0,0 +1,25 @@ +require 'test_helper' + +class TestMissingOpenURI < Test::Unit::TestCase + + def setup + super + @saved_open_uri = OpenURI + Object.send(:remove_const, :OpenURI) + end + + def teardown + super + Object.const_set(:OpenURI, @saved_open_uri) + end + + + def test_register_using_exception_without_open_uri + # regression test for Responder needing OpenURI::HTTPError to be defined + FakeWeb.register_uri(:get, "http://example.com/", :exception => StandardError) + assert_raises(StandardError) do + Net::HTTP.start("example.com") { |http| http.get("/") } + end + end + +end diff --git a/vendor/bundle/ruby/3.2.0/gems/fakeweb-1.3.0/test/test_missing_pathname.rb b/vendor/bundle/ruby/3.2.0/gems/fakeweb-1.3.0/test/test_missing_pathname.rb new file mode 100644 index 000000000..ee16a0d7b --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/fakeweb-1.3.0/test/test_missing_pathname.rb @@ -0,0 +1,37 @@ +require 'test_helper' + +class TestMissingPathname < Test::Unit::TestCase + + def setup + super + @saved_pathname = Pathname + Object.send(:remove_const, :Pathname) + end + + def teardown + super + Object.const_set(:Pathname, @saved_pathname) + end + + # FakeWeb supports using Pathname objects where filenames are expected, but + # Pathname isn't required to use FakeWeb. Make sure everything still works + # when Pathname isn't in use. + + def test_register_using_body_without_pathname + FakeWeb.register_uri(:get, "http://example.com/", :body => fixture_path("test_example.txt")) + Net::HTTP.start("example.com") { |http| http.get("/") } + end + + def test_register_using_response_without_pathname + FakeWeb.register_uri(:get, "http://example.com/", :response => fixture_path("google_response_without_transfer_encoding")) + Net::HTTP.start("example.com") { |http| http.get("/") } + end + + def test_register_using_unsupported_response_without_pathname + FakeWeb.register_uri(:get, "http://example.com/", :response => 1) + assert_raise StandardError do + Net::HTTP.start("example.com") { |http| http.get("/") } + end + end + +end diff --git a/vendor/bundle/ruby/3.2.0/gems/fakeweb-1.3.0/test/test_other_net_http_libraries.rb b/vendor/bundle/ruby/3.2.0/gems/fakeweb-1.3.0/test/test_other_net_http_libraries.rb new file mode 100644 index 000000000..af7e5e276 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/fakeweb-1.3.0/test/test_other_net_http_libraries.rb @@ -0,0 +1,36 @@ +require 'test_helper' + +class TestOtherNetHttpLibraries < Test::Unit::TestCase + + def capture_output_from_requiring(libs, additional_code = "") + requires = libs.map { |lib| "require '#{lib}'" }.join("; ") + fakeweb_dir = "#{File.dirname(__FILE__)}/../lib" + vendor_dirs = Dir["#{File.dirname(__FILE__)}/vendor/*/lib"] + load_path_opts = vendor_dirs.unshift(fakeweb_dir).map { |dir| "-I#{dir}" }.join(" ") + + `#{ruby_path} #{load_path_opts} -e "#{requires}; #{additional_code}" 2>&1` + end + + def test_requiring_samuel_before_fakeweb_prints_warning + output = capture_output_from_requiring %w(samuel fakeweb) + assert_match %r(Warning: FakeWeb was loaded after Samuel), output + end + + def test_requiring_samuel_after_fakeweb_does_not_print_warning + output = capture_output_from_requiring %w(fakeweb samuel) + assert output.empty? 
+ end + + def test_requiring_right_http_connection_before_fakeweb_and_then_connecting_does_not_print_warning + additional_code = "Net::HTTP.start('example.com')" + output = capture_output_from_requiring %w(right_http_connection fakeweb), additional_code + assert output.empty? + end + + def test_requiring_right_http_connection_after_fakeweb_and_then_connecting_prints_warning + additional_code = "Net::HTTP.start('example.com')" + output = capture_output_from_requiring %w(fakeweb right_http_connection), additional_code + assert_match %r(Warning: RightHttpConnection was loaded after FakeWeb), output + end + +end diff --git a/vendor/bundle/ruby/3.2.0/gems/fakeweb-1.3.0/test/test_precedence.rb b/vendor/bundle/ruby/3.2.0/gems/fakeweb-1.3.0/test/test_precedence.rb new file mode 100644 index 000000000..388b9f8a1 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/fakeweb-1.3.0/test/test_precedence.rb @@ -0,0 +1,79 @@ +require 'test_helper' + +class TestPrecedence < Test::Unit::TestCase + + def test_matching_get_strings_have_precedence_over_matching_get_regexes + FakeWeb.register_uri(:get, "http://example.com/test", :body => "string") + FakeWeb.register_uri(:get, %r|http://example\.com/test|, :body => "regex") + response = Net::HTTP.start("example.com") { |query| query.get('/test') } + assert_equal "string", response.body + end + + def test_matching_any_strings_have_precedence_over_matching_any_regexes + FakeWeb.register_uri(:any, "http://example.com/test", :body => "string") + FakeWeb.register_uri(:any, %r|http://example\.com/test|, :body => "regex") + response = Net::HTTP.start("example.com") { |query| query.get('/test') } + assert_equal "string", response.body + end + + def test_matching_get_strings_have_precedence_over_matching_any_strings + FakeWeb.register_uri(:get, "http://example.com/test", :body => "get method") + FakeWeb.register_uri(:any, "http://example.com/test", :body => "any method") + response = Net::HTTP.start("example.com") { |query| query.get('/test') } + assert_equal "get method", response.body + + # registration order should not matter + FakeWeb.register_uri(:any, "http://example.com/test2", :body => "any method") + FakeWeb.register_uri(:get, "http://example.com/test2", :body => "get method") + response = Net::HTTP.start("example.com") { |query| query.get('/test2') } + assert_equal "get method", response.body + end + + def test_matching_any_strings_have_precedence_over_matching_get_regexes + FakeWeb.register_uri(:any, "http://example.com/test", :body => "any string") + FakeWeb.register_uri(:get, %r|http://example\.com/test|, :body => "get regex") + response = Net::HTTP.start("example.com") { |query| query.get('/test') } + assert_equal "any string", response.body + end + + def test_registered_strings_and_uris_are_equivalent_so_second_takes_precedence + FakeWeb.register_uri(:get, "http://example.com/test", :body => "string") + FakeWeb.register_uri(:get, URI.parse("http://example.com/test"), :body => "uri") + response = Net::HTTP.start("example.com") { |query| query.get('/test') } + assert_equal "uri", response.body + + FakeWeb.register_uri(:get, URI.parse("http://example.com/test2"), :body => "uri") + FakeWeb.register_uri(:get, "http://example.com/test2", :body => "string") + response = Net::HTTP.start("example.com") { |query| query.get('/test2') } + assert_equal "string", response.body + end + + def test_identical_registration_replaces_previous_registration + FakeWeb.register_uri(:get, "http://example.com/test", :body => "first") + FakeWeb.register_uri(:get, 
"http://example.com/test", :body => "second") + response = Net::HTTP.start("example.com") { |query| query.get('/test') } + assert_equal "second", response.body + end + + def test_identical_registration_replaces_previous_registration_accounting_for_normalization + FakeWeb.register_uri(:get, "http://example.com/test?", :body => "first") + FakeWeb.register_uri(:get, "http://example.com:80/test", :body => "second") + response = Net::HTTP.start("example.com") { |query| query.get('/test') } + assert_equal "second", response.body + end + + def test_identical_registration_replaces_previous_registration_accounting_for_query_params + FakeWeb.register_uri(:get, "http://example.com/test?a=1&b=2", :body => "first") + FakeWeb.register_uri(:get, "http://example.com/test?b=2&a=1", :body => "second") + response = Net::HTTP.start("example.com") { |query| query.get('/test?a=1&b=2') } + assert_equal "second", response.body + end + + def test_identical_registration_replaces_previous_registration_with_regexes + FakeWeb.register_uri(:get, /test/, :body => "first") + FakeWeb.register_uri(:get, /test/, :body => "second") + response = Net::HTTP.start("example.com") { |query| query.get('/test') } + assert_equal "second", response.body + end + +end diff --git a/vendor/bundle/ruby/3.2.0/gems/fakeweb-1.3.0/test/test_query_string.rb b/vendor/bundle/ruby/3.2.0/gems/fakeweb-1.3.0/test/test_query_string.rb new file mode 100644 index 000000000..11a211848 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/fakeweb-1.3.0/test/test_query_string.rb @@ -0,0 +1,45 @@ +require 'test_helper' + +class TestFakeWebQueryString < Test::Unit::TestCase + + def test_register_uri_string_with_query_params + FakeWeb.register_uri(:get, 'http://example.com/?a=1&b=1', :body => 'foo') + assert FakeWeb.registered_uri?(:get, 'http://example.com/?a=1&b=1') + + FakeWeb.register_uri(:post, URI.parse("http://example.org/?a=1&b=1"), :body => "foo") + assert FakeWeb.registered_uri?(:post, "http://example.org/?a=1&b=1") + end + + def test_register_uri_with_query_params_and_check_in_different_order + FakeWeb.register_uri(:get, 'http://example.com/?a=1&b=1', :body => 'foo') + assert FakeWeb.registered_uri?(:get, 'http://example.com/?b=1&a=1') + + FakeWeb.register_uri(:post, URI.parse('http://example.org/?a=1&b=1'), :body => 'foo') + assert FakeWeb.registered_uri?(:post, 'http://example.org/?b=1&a=1') + end + + def test_registered_uri_gets_recognized_with_empty_query_params + FakeWeb.register_uri(:get, 'http://example.com/', :body => 'foo') + assert FakeWeb.registered_uri?(:get, 'http://example.com/?') + + FakeWeb.register_uri(:post, URI.parse('http://example.org/'), :body => 'foo') + assert FakeWeb.registered_uri?(:post, 'http://example.org/?') + end + + def test_register_uri_with_empty_query_params_and_check_with_none + FakeWeb.register_uri(:get, 'http://example.com/?', :body => 'foo') + assert FakeWeb.registered_uri?(:get, 'http://example.com/') + + FakeWeb.register_uri(:post, URI.parse('http://example.org/?'), :body => 'foo') + assert FakeWeb.registered_uri?(:post, 'http://example.org/') + end + + def test_registry_sort_query_params + assert_equal "a=1&b=2", FakeWeb::Registry.instance.send(:sort_query_params, "b=2&a=1") + end + + def test_registry_sort_query_params_sorts_by_value_if_keys_collide + assert_equal "a=1&a=2&b=2", FakeWeb::Registry.instance.send(:sort_query_params, "a=2&b=2&a=1") + end + +end diff --git a/vendor/bundle/ruby/3.2.0/gems/fakeweb-1.3.0/test/test_regexes.rb b/vendor/bundle/ruby/3.2.0/gems/fakeweb-1.3.0/test/test_regexes.rb new file 
mode 100644 index 000000000..e2eba1db8 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/fakeweb-1.3.0/test/test_regexes.rb @@ -0,0 +1,157 @@ +require 'test_helper' + +class TestRegexes < Test::Unit::TestCase + + def test_registered_uri_with_pattern + FakeWeb.register_uri(:get, %r|http://example.com/test_example/\d+|, :body => "example") + assert FakeWeb.registered_uri?(:get, "http://example.com/test_example/25") + assert !FakeWeb.registered_uri?(:get, "http://example.com/test_example/abc") + end + + def test_response_for_with_matching_registered_uri + FakeWeb.register_uri(:get, %r|http://www.google.com|, :body => "Welcome to Google!") + assert_equal "Welcome to Google!", FakeWeb.response_for(:get, "http://www.google.com").body + end + + def test_response_for_with_matching_registered_uri_and_get_method_matching_to_any_method + FakeWeb.register_uri(:any, %r|http://www.example.com|, :body => "example") + assert_equal "example", FakeWeb.response_for(:get, "http://www.example.com").body + end + + def test_registered_uri_with_authentication_and_pattern + FakeWeb.register_uri(:get, %r|http://user:pass@mock/example\.\w+|i, :body => "example") + assert FakeWeb.registered_uri?(:get, 'http://user:pass@mock/example.txt') + end + + def test_registered_uri_with_authentication_and_pattern_handles_case_insensitivity + FakeWeb.register_uri(:get, %r|http://user:pass@mock/example\.\w+|i, :body => "example") + assert FakeWeb.registered_uri?(:get, 'http://uSeR:PAss@mock/example.txt') + end + + def test_request_with_authentication_and_pattern_handles_case_insensitivity + FakeWeb.register_uri(:get, %r|http://user:pass@mock/example\.\w+|i, :body => "example") + http = Net::HTTP.new('mock', 80) + req = Net::HTTP::Get.new('/example.txt') + req.basic_auth 'uSeR', 'PAss' + assert_equal "example", http.request(req).body + end + + def test_requesting_a_uri_that_matches_two_registered_regexes_raises_an_error + FakeWeb.register_uri(:get, %r|http://example\.com/|, :body => "first") + FakeWeb.register_uri(:get, %r|http://example\.com/a|, :body => "second") + assert_raise FakeWeb::MultipleMatchingURIsError do + Net::HTTP.start("example.com") { |query| query.get('/a') } + end + end + + def test_requesting_a_uri_that_matches_two_registered_regexes_raises_an_error_including_request_info + FakeWeb.register_uri(:get, %r|http://example\.com/|, :body => "first") + FakeWeb.register_uri(:get, %r|http://example\.com/a|, :body => "second") + begin + Net::HTTP.start("example.com") { |query| query.get('/a') } + rescue FakeWeb::MultipleMatchingURIsError => exception + end + assert exception.message.include?("GET http://example.com/a") + end + + def test_registry_does_not_find_using_mismatched_protocols_or_ports_when_registered_with_both + FakeWeb.register_uri(:get, %r|http://www.example.com:80|, :body => "example") + assert !FakeWeb.registered_uri?(:get, "https://www.example.com:80") + assert !FakeWeb.registered_uri?(:get, "http://www.example.com:443") + end + + def test_registry_finds_using_non_default_port + FakeWeb.register_uri(:get, %r|example\.com:8080|, :body => "example") + assert FakeWeb.registered_uri?(:get, "http://www.example.com:8080/path") + assert FakeWeb.registered_uri?(:get, "https://www.example.com:8080/path") + end + + def test_registry_finds_using_default_port_and_http_when_registered_with_explicit_port_80 + FakeWeb.register_uri(:get, %r|example\.com:80|, :body => "example") + assert FakeWeb.registered_uri?(:get, "http://www.example.com/path") + + # check other permutations, too + assert FakeWeb.registered_uri?(:get, 
"http://www.example.com:80/path") + assert FakeWeb.registered_uri?(:get, "http://www.example.com:8080/path") + assert FakeWeb.registered_uri?(:get, "https://www.example.com:80/path") + assert FakeWeb.registered_uri?(:get, "https://www.example.com:8080/path") + assert !FakeWeb.registered_uri?(:get, "https://www.example.com/path") + end + + def test_registry_finds_using_default_port_and_https_when_registered_with_explicit_port_443 + FakeWeb.register_uri(:get, %r|example\.com:443|, :body => "example") + assert FakeWeb.registered_uri?(:get, "https://www.example.com/path") + + # check other permutations, too + assert FakeWeb.registered_uri?(:get, "https://www.example.com:443/path") + assert FakeWeb.registered_uri?(:get, "https://www.example.com:44321/path") + assert FakeWeb.registered_uri?(:get, "http://www.example.com:443/path") + assert FakeWeb.registered_uri?(:get, "http://www.example.com:44321/path") + assert !FakeWeb.registered_uri?(:get, "http://www.example.com/path") + end + + def test_registry_only_finds_using_default_port_when_registered_without_if_protocol_matches + FakeWeb.register_uri(:get, %r|http://www.example.com/test|, :body => "example") + assert FakeWeb.registered_uri?(:get, "http://www.example.com:80/test") + assert !FakeWeb.registered_uri?(:get, "http://www.example.com:443/test") + assert !FakeWeb.registered_uri?(:get, "https://www.example.com:443/test") + FakeWeb.register_uri(:get, %r|https://www.example.org/test|, :body => "example") + assert FakeWeb.registered_uri?(:get, "https://www.example.org:443/test") + assert !FakeWeb.registered_uri?(:get, "https://www.example.org:80/test") + assert !FakeWeb.registered_uri?(:get, "http://www.example.org:80/test") + end + + def test_registry_matches_using_mismatched_port_when_registered_without + FakeWeb.register_uri(:get, %r|http://www.example.com|, :body => "example") + assert FakeWeb.registered_uri?(:get, "http://www.example.com:80") + assert FakeWeb.registered_uri?(:get, "http://www.example.com:443") + assert FakeWeb.registered_uri?(:get, "http://www.example.com:12345") + assert !FakeWeb.registered_uri?(:get, "https://www.example.com:443") + assert !FakeWeb.registered_uri?(:get, "https://www.example.com") + end + + def test_registry_matches_using_default_port_for_protocol_when_registered_without_protocol_or_port + FakeWeb.register_uri(:get, %r|www.example.com/home|, :body => "example") + assert FakeWeb.registered_uri?(:get, "http://www.example.com/home") + assert FakeWeb.registered_uri?(:get, "https://www.example.com/home") + assert FakeWeb.registered_uri?(:get, "http://www.example.com:80/home") + assert FakeWeb.registered_uri?(:get, "https://www.example.com:443/home") + assert !FakeWeb.registered_uri?(:get, "https://www.example.com:80/home") + assert !FakeWeb.registered_uri?(:get, "http://www.example.com:443/home") + end + + def test_registry_matches_with_query_params + FakeWeb.register_uri(:get, %r[example.com/list\?(.*&|)important=1], :body => "example") + assert FakeWeb.registered_uri?(:get, "http://example.com/list?hash=123&important=1&unimportant=2") + assert FakeWeb.registered_uri?(:get, "http://example.com/list?hash=123&important=12&unimportant=2") + assert FakeWeb.registered_uri?(:get, "http://example.com/list?important=1&unimportant=2") + assert !FakeWeb.registered_uri?(:get, "http://example.com/list?important=2") + assert !FakeWeb.registered_uri?(:get, "http://example.com/list?important=2&unimportant=1") + assert !FakeWeb.registered_uri?(:get, "http://example.com/list?hash=123&important=2&unimportant=1") + assert 
!FakeWeb.registered_uri?(:get, "http://example.com/list?notimportant=1&unimportant=1") + end + + def test_registry_does_not_match_when_regex_has_unsorted_query_params + FakeWeb.register_uri(:get, %r[example\.com/list\?b=2&a=1], :body => "example") + assert !FakeWeb.registered_uri?(:get, "http://example.com/list?b=2&a=1") + assert !FakeWeb.registered_uri?(:get, "http://example.com/list?a=1&b=2") + assert !FakeWeb.registered_uri?(:get, "https://example.com:443/list?b=2&a=1") + end + + def test_registry_matches_when_regex_has_sorted_query_params + FakeWeb.register_uri(:get, %r[example\.com/list\?a=1&b=2], :body => "example") + assert FakeWeb.registered_uri?(:get, "http://example.com/list?b=2&a=1") + assert FakeWeb.registered_uri?(:get, "http://example.com/list?a=1&b=2") + assert FakeWeb.registered_uri?(:get, "https://example.com:443/list?b=2&a=1") + end + + def test_registry_matches_quickly_with_lots_of_query_params + # regression test for code that tried to calculate the permutations of the + # query params, which hangs with a large number of params + FakeWeb.register_uri(:get, %r[example.com], :body => "example") + Timeout::timeout(1) do + FakeWeb.registered_uri?(:get, "http://example.com/?a=1&b=2&c=3&d=4&e=5&f=6&g=7&h=8") + end + end + +end diff --git a/vendor/bundle/ruby/3.2.0/gems/fakeweb-1.3.0/test/test_response_headers.rb b/vendor/bundle/ruby/3.2.0/gems/fakeweb-1.3.0/test/test_response_headers.rb new file mode 100644 index 000000000..45f3f5a99 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/fakeweb-1.3.0/test/test_response_headers.rb @@ -0,0 +1,79 @@ +require 'test_helper' + +class TestResponseHeaders < Test::Unit::TestCase + def test_content_type_when_registering_with_string_and_content_type_header_as_symbol_option + FakeWeb.register_uri(:get, "http://example.com/users.json", :body => '[{"username": "chrisk"}]', :content_type => "application/json") + response = Net::HTTP.start("example.com") { |query| query.get("/users.json") } + assert_equal '[{"username": "chrisk"}]', response.body + assert_equal "application/json", response['Content-Type'] + end + + def test_content_type_when_registering_with_string_and_content_type_header_as_string_option + FakeWeb.register_uri(:get, "http://example.com/users.json", :body => '[{"username": "chrisk"}]', 'Content-Type' => "application/json") + response = Net::HTTP.start("example.com") { |query| query.get("/users.json") } + assert_equal "application/json", response['Content-Type'] + end + + def test_content_type_when_registering_with_string_only + FakeWeb.register_uri(:get, "http://example.com/users.json", :body => '[{"username": "chrisk"}]') + response = Net::HTTP.start("example.com") { |query| query.get("/users.json") } + assert_equal '[{"username": "chrisk"}]', response.body + assert_nil response['Content-Type'] + end + + def test_cookies_when_registering_with_file_and_set_cookie_header + FakeWeb.register_uri(:get, "http://example.com/", :body => fixture_path("test_example.txt"), + :set_cookie => "user_id=1; example=yes") + response = Net::HTTP.start("example.com") { |query| query.get("/") } + assert_equal "test example content", response.body + assert_equal "user_id=1; example=yes", response['Set-Cookie'] + end + + def test_multiple_set_cookie_headers + FakeWeb.register_uri(:get, "http://example.com", :set_cookie => ["user_id=1", "example=yes"]) + response = Net::HTTP.start("example.com") { |query| query.get("/") } + assert_equal ["user_id=1", "example=yes"], response.get_fields('Set-Cookie') + assert_equal "user_id=1, example=yes", 
response['Set-Cookie'] + end + + def test_registering_with_baked_response_ignores_header_options + fake_response = Net::HTTPOK.new('1.1', '200', 'OK') + fake_response["Server"] = "Apache/1.3.27 (Unix)" + FakeWeb.register_uri(:get, "http://example.com/", :response => fake_response, + :server => "FakeWeb/1.2.3 (Ruby)") + response = Net::HTTP.start("example.com") { |query| query.get("/") } + assert_equal "200", response.code + assert_equal "OK", response.message + assert_equal "Apache/1.3.27 (Unix)", response["Server"] + end + + def test_headers_are_rotated_when_registering_with_response_rotation + FakeWeb.register_uri(:get, "http://example.com", + [{:body => 'test1', :expires => "Thu, 14 Jun 2009 16:00:00 GMT", + :content_type => "text/plain"}, + {:body => 'test2', :expires => "Thu, 14 Jun 2009 16:00:01 GMT"}]) + + first_response = second_response = nil + Net::HTTP.start("example.com") do |query| + first_response = query.get("/") + second_response = query.get("/") + end + assert_equal 'test1', first_response.body + assert_equal "Thu, 14 Jun 2009 16:00:00 GMT", first_response['Expires'] + assert_equal "text/plain", first_response['Content-Type'] + assert_equal 'test2', second_response.body + assert_equal "Thu, 14 Jun 2009 16:00:01 GMT", second_response['Expires'] + assert_nil second_response['Content-Type'] + end + + def test_registering_with_status_option_and_response_headers + FakeWeb.register_uri(:get, "http://example.com", :status => ["301", "Moved Permanently"], + :location => "http://www.example.com") + + response = Net::HTTP.start("example.com") { |query| query.get("/") } + assert_equal "301", response.code + assert_equal "Moved Permanently", response.message + assert_equal "http://www.example.com", response["Location"] + end + +end diff --git a/vendor/bundle/ruby/3.2.0/gems/fakeweb-1.3.0/test/test_trailing_slashes.rb b/vendor/bundle/ruby/3.2.0/gems/fakeweb-1.3.0/test/test_trailing_slashes.rb new file mode 100644 index 000000000..564d807dc --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/fakeweb-1.3.0/test/test_trailing_slashes.rb @@ -0,0 +1,53 @@ +require 'test_helper' + +class TestFakeWebTrailingSlashes < Test::Unit::TestCase + + def test_registering_root_without_slash_and_ask_predicate_method_with_slash + FakeWeb.register_uri(:get, "http://www.example.com", :body => "root") + assert FakeWeb.registered_uri?(:get, "http://www.example.com/") + end + + def test_registering_root_without_slash_and_request + FakeWeb.register_uri(:get, "http://www.example.com", :body => "root") + response = Net::HTTP.start("www.example.com") { |query| query.get('/') } + assert_equal "root", response.body + end + + def test_registering_root_with_slash_and_ask_predicate_method_without_slash + FakeWeb.register_uri(:get, "http://www.example.com/", :body => "root") + assert FakeWeb.registered_uri?(:get, "http://www.example.com") + end + + def test_registering_root_with_slash_and_request + FakeWeb.register_uri(:get, "http://www.example.com/", :body => "root") + response = Net::HTTP.start("www.example.com") { |query| query.get('/') } + assert_equal "root", response.body + end + + def test_registering_path_without_slash_and_ask_predicate_method_with_slash + FakeWeb.register_uri(:get, "http://www.example.com/users", :body => "User list") + assert !FakeWeb.registered_uri?(:get, "http://www.example.com/users/") + end + + def test_registering_path_without_slash_and_request_with_slash + FakeWeb.allow_net_connect = false + FakeWeb.register_uri(:get, "http://www.example.com/users", :body => "User list") + assert_raise 
FakeWeb::NetConnectNotAllowedError do + response = Net::HTTP.start("www.example.com") { |query| query.get('/users/') } + end + end + + def test_registering_path_with_slash_and_ask_predicate_method_without_slash + FakeWeb.register_uri(:get, "http://www.example.com/users/", :body => "User list") + assert !FakeWeb.registered_uri?(:get, "http://www.example.com/users") + end + + def test_registering_path_with_slash_and_request_without_slash + FakeWeb.allow_net_connect = false + FakeWeb.register_uri(:get, "http://www.example.com/users/", :body => "User list") + assert_raise FakeWeb::NetConnectNotAllowedError do + response = Net::HTTP.start("www.example.com") { |query| query.get('/users') } + end + end + +end diff --git a/vendor/bundle/ruby/3.2.0/gems/fakeweb-1.3.0/test/test_utility.rb b/vendor/bundle/ruby/3.2.0/gems/fakeweb-1.3.0/test/test_utility.rb new file mode 100644 index 000000000..891de875b --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/fakeweb-1.3.0/test/test_utility.rb @@ -0,0 +1,83 @@ +require 'test_helper' + +class TestUtility < Test::Unit::TestCase + + def test_decode_userinfo_from_header_handles_basic_auth + authorization_header = "Basic dXNlcm5hbWU6c2VjcmV0" + userinfo = FakeWeb::Utility.decode_userinfo_from_header(authorization_header) + assert_equal "username:secret", userinfo + end + + def test_encode_unsafe_chars_in_userinfo_does_not_encode_userinfo_safe_punctuation + userinfo = "user;&=+$,:secret" + assert_equal userinfo, FakeWeb::Utility.encode_unsafe_chars_in_userinfo(userinfo) + end + + def test_encode_unsafe_chars_in_userinfo_does_not_encode_rfc_3986_unreserved_characters + userinfo = "-_.!~*'()abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789:secret" + assert_equal userinfo, FakeWeb::Utility.encode_unsafe_chars_in_userinfo(userinfo) + end + + def test_encode_unsafe_chars_in_userinfo_does_encode_other_characters + userinfo, safe_userinfo = 'us#rn@me:sec//ret?"', 'us%23rn%40me:sec%2F%2Fret%3F%22' + assert_equal safe_userinfo, FakeWeb::Utility.encode_unsafe_chars_in_userinfo(userinfo) + end + + def test_strip_default_port_from_uri_strips_80_from_http_with_path + uri = "http://example.com:80/foo/bar" + stripped_uri = FakeWeb::Utility.strip_default_port_from_uri(uri) + assert_equal "http://example.com/foo/bar", stripped_uri + end + + def test_strip_default_port_from_uri_strips_80_from_http_without_path + uri = "http://example.com:80" + stripped_uri = FakeWeb::Utility.strip_default_port_from_uri(uri) + assert_equal "http://example.com", stripped_uri + end + + def test_strip_default_port_from_uri_strips_443_from_https_without_path + uri = "https://example.com:443" + stripped_uri = FakeWeb::Utility.strip_default_port_from_uri(uri) + assert_equal "https://example.com", stripped_uri + end + + def test_strip_default_port_from_uri_strips_443_from_https + uri = "https://example.com:443/foo/bar" + stripped_uri = FakeWeb::Utility.strip_default_port_from_uri(uri) + assert_equal "https://example.com/foo/bar", stripped_uri + end + + def test_strip_default_port_from_uri_does_not_strip_8080_from_http + uri = "http://example.com:8080/foo/bar" + assert_equal uri, FakeWeb::Utility.strip_default_port_from_uri(uri) + end + + def test_strip_default_port_from_uri_does_not_strip_443_from_http + uri = "http://example.com:443/foo/bar" + assert_equal uri, FakeWeb::Utility.strip_default_port_from_uri(uri) + end + + def test_strip_default_port_from_uri_does_not_strip_80_from_query_string + uri = "http://example.com/?a=:80&b=c" + assert_equal uri, 
FakeWeb::Utility.strip_default_port_from_uri(uri) + end + + def test_strip_default_port_from_uri_does_not_modify_strings_that_do_not_start_with_http_or_https + uri = "httpz://example.com:80/" + assert_equal uri, FakeWeb::Utility.strip_default_port_from_uri(uri) + end + + def test_request_uri_as_string + http = Net::HTTP.new("www.example.com", 80) + request = Net::HTTP::Get.new("/index.html") + expected = "http://www.example.com:80/index.html" + assert_equal expected, FakeWeb::Utility.request_uri_as_string(http, request) + end + + def test_uri_escape_delegates_to_uri_parser_when_available + parsing_object = URI.const_defined?(:Parser) ? URI::Parser.any_instance : URI + parsing_object.expects(:escape).with("string", /unsafe/).returns("escaped") + assert_equal "escaped", FakeWeb::Utility.uri_escape("string", /unsafe/) + end + +end diff --git a/vendor/bundle/ruby/3.2.0/gems/fakeweb-1.3.0/test/vendor/right_http_connection-1.2.4/History.txt b/vendor/bundle/ruby/3.2.0/gems/fakeweb-1.3.0/test/vendor/right_http_connection-1.2.4/History.txt new file mode 100644 index 000000000..e06bcd0a2 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/fakeweb-1.3.0/test/vendor/right_http_connection-1.2.4/History.txt @@ -0,0 +1,59 @@ +== 0.0.1 2007-05-15 +* 1 major enhancement: + * Initial release + +== 0.1.2 2007-06-27 + +* No major changes. + +== 0.1.3 2007-07-09 + +* No change. + +== 0.1.4 2007-08-10 + +* r1442, todd, 2007-08-07 15:45:24 + * # 373, Add support in right_http_connection for bailing out to a block while + reading the HTTP response (to support GET streaming...) + +* r1411, todd, 2007-08-03 15:14:45 + * # 373, Stream uploads (PUTs) if the source is a file, stream, or anything + read()-able + +== 1.1.0 2007-08-15 +Initial public release + +== 1.2.0 2007-10-05 + +* r1867, konstantin, 2007-10-05 06:19:45 + * # 220, (re)open connection to server if none exists or connection params + have changed + +== 1.2.1 + +* r2648, konstantin, 01-24-08 11:12:00 + * net_fix.rb moved from right_aws gem to fix the problem with uploading the streamable + objects to S3 + +* r2764, konstantin, 02-08-08 00:05:00 +03:00 + * "RightAws: incompatible Net::HTTP monkey-patch" exception is raised if our net_fix + patch was overriden (by attachment_fu for example, to avoid this load attachment_fu + before loading the right_http_connection gem). + +== 1.2.2 + +* r3524, konstantin, 2008-04-17 11:35:42 +0400 + * Fixed a problem with incorrect error handling (connection retries always failed). 
+ +== 1.2.3 + +- Added support for setting retry & timeout parameters in the constructor +- Improve handling of data streams during upload: if there is a failure and a retry, reset + the seek pointer for the subsequent re-request + +== 1.2.4 + +* r4984, konstantin, 2008-08-11 14:49:18 +0400 + * fixed a bug: + diff --git a/vendor/bundle/ruby/3.2.0/gems/fakeweb-1.3.0/test/vendor/right_http_connection-1.2.4/Manifest.txt b/vendor/bundle/ruby/3.2.0/gems/fakeweb-1.3.0/test/vendor/right_http_connection-1.2.4/Manifest.txt new file mode 100644 index 000000000..20f193b21 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/fakeweb-1.3.0/test/vendor/right_http_connection-1.2.4/Manifest.txt @@ -0,0 +1,7 @@ +History.txt +Manifest.txt +README.txt +Rakefile +lib/net_fix.rb +lib/right_http_connection.rb +setup.rb diff --git a/vendor/bundle/ruby/3.2.0/gems/fakeweb-1.3.0/test/vendor/right_http_connection-1.2.4/README.txt b/vendor/bundle/ruby/3.2.0/gems/fakeweb-1.3.0/test/vendor/right_http_connection-1.2.4/README.txt new file mode 100644 index 000000000..46c97e57a --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/fakeweb-1.3.0/test/vendor/right_http_connection-1.2.4/README.txt @@ -0,0 +1,54 @@ +RightScale::HttpConnection + by RightScale, Inc. + www.RightScale.com + +== DESCRIPTION: + +Rightscale::HttpConnection is a robust HTTP/S library. It implements a retry +algorithm for low-level network errors. + +== FEATURES: + +- provides put/get streaming +- does configurable retries on connect and read timeouts, DNS failures, etc. +- HTTPS certificate checking + +== SYNOPSIS: + + +== REQUIREMENTS: + +- 2/11/08: If you use RightScale::HttpConnection in conjunction with attachment_fu, the + HttpConnection gem must be included (using the require statement) AFTER + attachment_fu. + This is due to a conflict between the HttpConnection gem and another + gem required by attachment_fu. + + + +== INSTALL: + +sudo gem install right_http_connection + +== LICENSE: + +Copyright (c) 2007-2008 RightScale, Inc. + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +'Software'), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, +TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
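(Aside, not part of the vendored files: the README above describes the gem only in prose, so here is a minimal usage sketch. The constructor options and the request-hash keys :server, :port, :protocol and :request are recalled from how right_aws typically drives this gem and should be read as assumptions, not documented API.)

    require 'right_http_connection'
    require 'logger'

    # Minimal sketch; option and key names are assumptions based on how
    # right_aws drives Rightscale::HttpConnection, not on this README.
    conn = Rightscale::HttpConnection.new(:logger    => Logger.new($stdout),
                                          :exception => RuntimeError)

    request  = Net::HTTP::Get.new('/main/rss/hotnews/hotnews.rss')
    response = conn.request(:server   => 'images.apple.com',
                            :port     => 80,
                            :protocol => 'http',
                            :request  => request)
    puts response.code

Per the FEATURES list above, retries on connect/read timeouts and DNS failures happen inside the request call itself rather than in caller code.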
diff --git a/vendor/bundle/ruby/3.2.0/gems/fakeweb-1.3.0/test/vendor/right_http_connection-1.2.4/Rakefile b/vendor/bundle/ruby/3.2.0/gems/fakeweb-1.3.0/test/vendor/right_http_connection-1.2.4/Rakefile new file mode 100644 index 000000000..0ae50977c --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/fakeweb-1.3.0/test/vendor/right_http_connection-1.2.4/Rakefile @@ -0,0 +1,103 @@ +require 'rubygems' +require 'rake' +require 'rake/clean' +require 'rake/testtask' +require 'rake/packagetask' +require 'rake/gempackagetask' +require 'rake/rdoctask' +require 'rake/contrib/rubyforgepublisher' +require 'fileutils' +require 'hoe' +include FileUtils +require File.join(File.dirname(__FILE__), 'lib', 'right_http_connection') + +AUTHOR = 'RightScale' # can also be an array of Authors +EMAIL = "rubygems@rightscale.com" +DESCRIPTION = "RightScale's robust HTTP/S connection module" +GEM_NAME = 'right_http_connection' # what ppl will type to install your gem +RUBYFORGE_PROJECT = 'rightscale' # The unix name for your project +HOMEPATH = "http://#{RUBYFORGE_PROJECT}.rubyforge.org" +DOWNLOAD_PATH = "http://rubyforge.org/projects/#{RUBYFORGE_PROJECT}" + +NAME = "right_http_connection" +REV = nil # UNCOMMENT IF REQUIRED: File.read(".svn/entries")[/committed-rev="(d+)"/, 1] rescue nil +VERS = RightHttpConnection::VERSION::STRING + (REV ? ".#{REV}" : "") +CLEAN.include ['**/.*.sw?', '*.gem', '.config', '**/.DS_Store'] +RDOC_OPTS = ['--quiet', '--title', 'right_http_connection documentation', + "--opname", "index.html", + "--line-numbers", + "--main", "README", + "--inline-source"] + +# Suppress Hoe's self-inclusion as a dependency for our Gem. This also keeps +# Rake & rubyforge out of the dependency list. Users must manually install +# these gems to run tests, etc. +# TRB 2/19/09: also do this for the extra_dev_deps array present in newer hoes. +# Older versions of RubyGems will try to install developer-dependencies as +# required runtime dependencies.... +class Hoe + def extra_deps + @extra_deps.reject do |x| + Array(x).first == 'hoe' + end + end + def extra_dev_deps + @extra_dev_deps.reject do |x| + Array(x).first == 'hoe' + end + end +end + +# Generate all the Rake tasks +# Run 'rake -T' to see list of generated tasks (from gem root directory) +hoe = Hoe.new(GEM_NAME, VERS) do |p| + p.author = AUTHOR + p.description = DESCRIPTION + p.email = EMAIL + p.summary = DESCRIPTION + p.url = HOMEPATH + p.rubyforge_name = RUBYFORGE_PROJECT if RUBYFORGE_PROJECT + p.test_globs = ["test/**/test_*.rb"] + p.clean_globs = CLEAN #An array of file patterns to delete on clean. + p.remote_rdoc_dir = "right_http_gem_doc" + + # == Optional + p.changes = p.paragraphs_of("History.txt", 0..1).join("\n\n") + #p.extra_deps = [] # An array of rubygem dependencies [name, version], e.g. [ ['active_support', '>= 1.3.1'] ] + #p.spec_extras = {} # A hash of extra values to set in the gemspec. 
+end + + +desc 'Generate website files' +task :website_generate do + Dir['website/**/*.txt'].each do |txt| + sh %{ ruby scripts/txt2html #{txt} > #{txt.gsub(/txt$/,'html')} } + end +end + +desc 'Upload website files to rubyforge' +task :website_upload do + config = YAML.load(File.read(File.expand_path("~/.rubyforge/user-config.yml"))) + host = "#{config["username"]}@rubyforge.org" + remote_dir = "/var/www/gforge-projects/#{RUBYFORGE_PROJECT}/" + # remote_dir = "/var/www/gforge-projects/#{RUBYFORGE_PROJECT}/#{GEM_NAME}" + local_dir = 'website' + sh %{rsync -av #{local_dir}/ #{host}:#{remote_dir}} +end + +desc 'Generate and upload website files' +task :website => [:website_generate, :website_upload] + +desc 'Release the website and new gem version' +task :deploy => [:check_version, :website, :release] + +task :check_version do + unless ENV['VERSION'] + puts 'Must pass a VERSION=x.y.z release version' + exit + end + unless ENV['VERSION'] == VERS + puts "Please update your version.rb to match the release version, currently #{VERS}" + exit + end +end diff --git a/vendor/bundle/ruby/3.2.0/gems/fakeweb-1.3.0/test/vendor/right_http_connection-1.2.4/lib/net_fix.rb b/vendor/bundle/ruby/3.2.0/gems/fakeweb-1.3.0/test/vendor/right_http_connection-1.2.4/lib/net_fix.rb new file mode 100644 index 000000000..ad54f8a2a --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/fakeweb-1.3.0/test/vendor/right_http_connection-1.2.4/lib/net_fix.rb @@ -0,0 +1,160 @@ +# +# Copyright (c) 2008 RightScale Inc +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be +# included in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# +# + +# Net::HTTP and Net::HTTPGenericRequest fixes to support 100-continue on +# POST and PUT. The request must have 'expect' field set to '100-continue'. 
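+#
+# Illustrative sketch (editorial addition, not part of the original gem):
+# with these patches loaded, a streaming PUT that requests the 100-continue
+# handshake looks roughly like this (host, path, and file name are
+# placeholders, and lib/ is assumed to be on the load path):
+#
+#   require "net/http"
+#   require "net_fix"
+#
+#   # 16 KB is the patched default; shown only to illustrate the tuning knob.
+#   Net::HTTPGenericRequest.local_read_size = 16 * 1024
+#
+#   File.open("payload.bin", "rb") do |file|
+#     req = Net::HTTP::Put.new("/upload")
+#     req["expect"]      = "100-continue"   # selects the header-first exec path
+#     req.content_length = file.stat.size   # required when streaming a body
+#     req.body_stream    = file
+#     Net::HTTP.start("www.example.com", 80) { |http| http.request(req) }
+#   end
+#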
+ + +module Net + + class BufferedIO #:nodoc: + # Monkey-patch Net::BufferedIO to read > 1024 bytes from the socket at a time + + # Default size (in bytes) of the max read from a socket into the user space read buffers for socket IO + DEFAULT_SOCKET_READ_SIZE = 16*1024 + + @@socket_read_size = DEFAULT_SOCKET_READ_SIZE + + def self.socket_read_size=(readsize) + if(readsize <= 0) + return + end + @@socket_read_size = readsize + end + + def self.socket_read_size?() + @@socket_read_size + end + + def rbuf_fill + timeout(@read_timeout) { + @rbuf << @io.sysread(@@socket_read_size) + } + end + end + + + #-- Net::HTTPGenericRequest -- + + class HTTPGenericRequest + # Monkey-patch Net::HTTPGenericRequest to read > 1024 bytes from the local data + # source at a time (used in streaming PUTs) + + # Default size (in bytes) of the max read from a local source (File, String, + # etc.) to the user space write buffers for socket IO. + DEFAULT_LOCAL_READ_SIZE = 16*1024 + + @@local_read_size = DEFAULT_LOCAL_READ_SIZE + + def self.local_read_size=(readsize) + if(readsize <= 0) + return + end + @@local_read_size = readsize + end + + def self.local_read_size?() + @@local_read_size + end + + def exec(sock, ver, path, send_only=nil) #:nodoc: internal use only + if @body + send_request_with_body sock, ver, path, @body, send_only + elsif @body_stream + send_request_with_body_stream sock, ver, path, @body_stream, send_only + else + write_header(sock, ver, path) + end + end + + private + + def send_request_with_body(sock, ver, path, body, send_only=nil) + self.content_length = body.length + delete 'Transfer-Encoding' + supply_default_content_type + write_header(sock, ver, path) unless send_only == :body + sock.write(body) unless send_only == :header + end + + def send_request_with_body_stream(sock, ver, path, f, send_only=nil) + unless content_length() or chunked? + raise ArgumentError, + "Content-Length not given and Transfer-Encoding is not `chunked'" + end + supply_default_content_type + write_header(sock, ver, path) unless send_only == :body + unless send_only == :header + if chunked? + while s = f.read(@@local_read_size) + sock.write(sprintf("%x\r\n", s.length) << s << "\r\n") + end + sock.write "0\r\n\r\n" + else + while s = f.read(@@local_read_size) + sock.write s + end + end + end + end + end + + + #-- Net::HTTP -- + + class HTTP + def request(req, body = nil, &block) # :yield: +response+ + unless started? + start { + req['connection'] ||= 'close' + return request(req, body, &block) + } + end + if proxy_user() + unless use_ssl? + req.proxy_basic_auth proxy_user(), proxy_pass() + end + end + # set body + req.set_body_internal body + begin_transport req + # if we expect 100-continue then send a header first + send_only = ((req.is_a?(Post)||req.is_a?(Put)) && (req['expect']=='100-continue')) ? :header : nil + req.exec @socket, @curr_http_version, edit_path(req.path), send_only + begin + res = HTTPResponse.read_new(@socket) + # if we expected 100-continue then send a body + if res.is_a?(HTTPContinue) && send_only && req['content-length'].to_i > 0 + req.exec @socket, @curr_http_version, edit_path(req.path), :body + end + end while res.kind_of?(HTTPContinue) + res.reading_body(@socket, req.response_body_permitted?) { + yield res if block_given? 
+ } + end_transport req, res + res + end + end + +end diff --git a/vendor/bundle/ruby/3.2.0/gems/fakeweb-1.3.0/test/vendor/right_http_connection-1.2.4/lib/right_http_connection.rb b/vendor/bundle/ruby/3.2.0/gems/fakeweb-1.3.0/test/vendor/right_http_connection-1.2.4/lib/right_http_connection.rb new file mode 100644 index 000000000..0151ae685 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/fakeweb-1.3.0/test/vendor/right_http_connection-1.2.4/lib/right_http_connection.rb @@ -0,0 +1,435 @@ +# +# Copyright (c) 2007-2008 RightScale Inc +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be +# included in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# + +require "net/https" +require "uri" +require "time" +require "logger" + +$:.unshift(File.dirname(__FILE__)) +require "net_fix" + + +module RightHttpConnection #:nodoc: + module VERSION #:nodoc: + MAJOR = 1 + MINOR = 2 + TINY = 4 + + STRING = [MAJOR, MINOR, TINY].join('.') + end +end + + +module Rightscale + +=begin rdoc +HttpConnection maintains a persistent HTTP connection to a remote +server. Each instance maintains its own unique connection to the +HTTP server. HttpConnection makes a best effort to receive a proper +HTTP response from the server, although it does not guarantee that +this response contains a HTTP Success code. + +On low-level errors (TCP/IP errors) HttpConnection invokes a reconnect +and retry algorithm. Note that although each HttpConnection object +has its own connection to the HTTP server, error handling is shared +across all connections to a server. For example, if there are three +connections to www.somehttpserver.com, a timeout error on one of those +connections will cause all three connections to break and reconnect. +A connection will not break and reconnect, however, unless a request +becomes active on it within a certain amount of time after the error +(as specified by HTTP_CONNECTION_RETRY_DELAY). An idle connection will not +break even if other connections to the same server experience errors. + +A HttpConnection will retry a request a certain number of times (as +defined by HTTP_CONNNECTION_RETRY_COUNT). If all the retries fail, +an exception is thrown and all HttpConnections associated with a +server enter a probationary period defined by HTTP_CONNECTION_RETRY_DELAY. +If the user makes a new request subsequent to entering probation, +the request will fail immediately with the same exception thrown +on probation entry. This is so that if the HTTP server has gone +down, not every subsequent request must wait for a connect timeout +before failing. 
After the probation period expires, the internal +state of the HttpConnection is reset and subsequent requests have +the full number of potential reconnects and retries available to +them. +=end + + class HttpConnection + + # Number of times to retry the request after encountering the first error + HTTP_CONNECTION_RETRY_COUNT = 3 + # Throw a Timeout::Error if a connection isn't established within this number of seconds + HTTP_CONNECTION_OPEN_TIMEOUT = 5 + # Throw a Timeout::Error if no data have been read on this connnection within this number of seconds + HTTP_CONNECTION_READ_TIMEOUT = 120 + # Length of the post-error probationary period during which all requests will fail + HTTP_CONNECTION_RETRY_DELAY = 15 + + #-------------------- + # class methods + #-------------------- + # + @@params = {} + @@params[:http_connection_retry_count] = HTTP_CONNECTION_RETRY_COUNT + @@params[:http_connection_open_timeout] = HTTP_CONNECTION_OPEN_TIMEOUT + @@params[:http_connection_read_timeout] = HTTP_CONNECTION_READ_TIMEOUT + @@params[:http_connection_retry_delay] = HTTP_CONNECTION_RETRY_DELAY + + # Query the global (class-level) parameters: + # + # :user_agent => 'www.HostName.com' # String to report as HTTP User agent + # :ca_file => 'path_to_file' # Path to a CA certification file in PEM format. The file can contain several CA certificates. If this parameter isn't set, HTTPS certs won't be verified. + # :logger => Logger object # If omitted, HttpConnection logs to STDOUT + # :exception => Exception to raise # The type of exception to raise + # # if a request repeatedly fails. RuntimeError is raised if this parameter is omitted. + # :http_connection_retry_count # by default == Rightscale::HttpConnection::HTTP_CONNECTION_RETRY_COUNT + # :http_connection_open_timeout # by default == Rightscale::HttpConnection::HTTP_CONNECTION_OPEN_TIMEOUT + # :http_connection_read_timeout # by default == Rightscale::HttpConnection::HTTP_CONNECTION_READ_TIMEOUT + # :http_connection_retry_delay # by default == Rightscale::HttpConnection::HTTP_CONNECTION_RETRY_DELAY + def self.params + @@params + end + + # Set the global (class-level) parameters + def self.params=(params) + @@params = params + end + + #------------------ + # instance methods + #------------------ + attr_accessor :http + attr_accessor :server + attr_accessor :params # see @@params + attr_accessor :logger + + # Params hash: + # :user_agent => 'www.HostName.com' # String to report as HTTP User agent + # :ca_file => 'path_to_file' # A path of a CA certification file in PEM format. The file can contain several CA certificates. + # :logger => Logger object # If omitted, HttpConnection logs to STDOUT + # :exception => Exception to raise # The type of exception to raise if a request repeatedly fails. RuntimeError is raised if this parameter is omitted. 
+ # :http_connection_retry_count # by default == Rightscale::HttpConnection.params[:http_connection_retry_count] + # :http_connection_open_timeout # by default == Rightscale::HttpConnection.params[:http_connection_open_timeout] + # :http_connection_read_timeout # by default == Rightscale::HttpConnection.params[:http_connection_read_timeout] + # :http_connection_retry_delay # by default == Rightscale::HttpConnection.params[:http_connection_retry_delay] + # + def initialize(params={}) + @params = params + @params[:http_connection_retry_count] ||= @@params[:http_connection_retry_count] + @params[:http_connection_open_timeout] ||= @@params[:http_connection_open_timeout] + @params[:http_connection_read_timeout] ||= @@params[:http_connection_read_timeout] + @params[:http_connection_retry_delay] ||= @@params[:http_connection_retry_delay] + @http = nil + @server = nil + @logger = get_param(:logger) || + (RAILS_DEFAULT_LOGGER if defined?(RAILS_DEFAULT_LOGGER)) || + Logger.new(STDOUT) + end + + def get_param(name) + @params[name] || @@params[name] + end + + # Query for the maximum size (in bytes) of a single read from the underlying + # socket. For bulk transfer, especially over fast links, this is value is + # critical to performance. + def socket_read_size? + Net::BufferedIO.socket_read_size? + end + + # Set the maximum size (in bytes) of a single read from the underlying + # socket. For bulk transfer, especially over fast links, this is value is + # critical to performance. + def socket_read_size=(newsize) + Net::BufferedIO.socket_read_size=(newsize) + end + + # Query for the maximum size (in bytes) of a single read from local data + # sources like files. This is important, for example, in a streaming PUT of a + # large buffer. + def local_read_size? + Net::HTTPGenericRequest.local_read_size? + end + + # Set the maximum size (in bytes) of a single read from local data + # sources like files. This can be used to tune the performance of, for example, a streaming PUT of a + # large buffer. + def local_read_size=(newsize) + Net::HTTPGenericRequest.local_read_size=(newsize) + end + + private + #-------------- + # Retry state - Keep track of errors on a per-server basis + #-------------- + @@state = {} # retry state indexed by server: consecutive error count, error time, and error + @@eof = {} + + # number of consecutive errors seen for server, 0 all is ok + def error_count + @@state[@server] ? @@state[@server][:count] : 0 + end + + # time of last error for server, nil if all is ok + def error_time + @@state[@server] && @@state[@server][:time] + end + + # message for last error for server, "" if all is ok + def error_message + @@state[@server] ? @@state[@server][:message] : "" + end + + # add an error for a server + def error_add(message) + @@state[@server] = { :count => error_count+1, :time => Time.now, :message => message } + end + + # reset the error state for a server (i.e. a request succeeded) + def error_reset + @@state.delete(@server) + end + + # Error message stuff... + def banana_message + return "#{@server} temporarily unavailable: (#{error_message})" + end + + def err_header + return "#{self.class.name} :" + end + + # Adds new EOF timestamp. + # Returns the number of seconds to wait before new conection retry: + # 0.5, 1, 2, 4, 8 + def add_eof + (@@eof[@server] ||= []).unshift Time.now + 0.25 * 2 ** @@eof[@server].size + end + + # Returns first EOF timestamp or nul if have no EOFs being tracked. 
+ def eof_time + @@eof[@server] && @@eof[@server].last + end + + # Returns true if we are receiving EOFs during last @params[:http_connection_retry_delay] seconds + # and there were no successful response from server + def raise_on_eof_exception? + @@eof[@server].blank? ? false : ( (Time.now.to_i-@params[:http_connection_retry_delay]) > @@eof[@server].last.to_i ) + end + + # Reset a list of EOFs for this server. + # This is being called when we have got an successful response from server. + def eof_reset + @@eof.delete(@server) + end + + # Detects if an object is 'streamable' - can we read from it, and can we know the size? + def setup_streaming(request) + if(request.body && request.body.respond_to?(:read)) + body = request.body + request.content_length = body.respond_to?(:lstat) ? body.lstat.size : body.size + request.body_stream = request.body + true + end + end + + def get_fileptr_offset(request_params) + request_params[:request].body.pos + rescue Exception => e + # Probably caught this because the body doesn't support the pos() method, like if it is a socket. + # Just return 0 and get on with life. + 0 + end + + def reset_fileptr_offset(request, offset = 0) + if(request.body_stream && request.body_stream.respond_to?(:pos)) + begin + request.body_stream.pos = offset + rescue Exception => e + @logger.warn("Failed file pointer reset; aborting HTTP retries." + + " -- #{err_header} #{e.inspect}") + raise e + end + end + end + + # Start a fresh connection. The object closes any existing connection and + # opens a new one. + def start(request_params) + # close the previous if exists + finish + # create new connection + @server = request_params[:server] + @port = request_params[:port] + @protocol = request_params[:protocol] + + @logger.info("Opening new #{@protocol.upcase} connection to #@server:#@port") + @http = Net::HTTP.new(@server, @port) + @http.open_timeout = @params[:http_connection_open_timeout] + @http.read_timeout = @params[:http_connection_read_timeout] + + if @protocol == 'https' + verifyCallbackProc = Proc.new{ |ok, x509_store_ctx| + code = x509_store_ctx.error + msg = x509_store_ctx.error_string + #debugger + @logger.warn("##### #{@server} certificate verify failed: #{msg}") unless code == 0 + true + } + @http.use_ssl = true + ca_file = get_param(:ca_file) + if ca_file + @http.verify_mode = OpenSSL::SSL::VERIFY_PEER + @http.verify_callback = verifyCallbackProc + @http.ca_file = ca_file + end + end + # open connection + @http.start + end + + public + +=begin rdoc + Send HTTP request to server + + request_params hash: + :server => 'www.HostName.com' # Hostname or IP address of HTTP server + :port => '80' # Port of HTTP server + :protocol => 'https' # http and https are supported on any port + :request => 'requeststring' # Fully-formed HTTP request to make + + Raises RuntimeError, Interrupt, and params[:exception] (if specified in new). + +=end + def request(request_params, &block) + # We save the offset here so that if we need to retry, we can return the file pointer to its initial position + mypos = get_fileptr_offset(request_params) + loop do + # if we are inside a delay between retries: no requests this time! 
+ if error_count > @params[:http_connection_retry_count] && + error_time + @params[:http_connection_retry_delay] > Time.now + # store the message (otherwise it will be lost after error_reset and + # we will raise an exception with an empty text) + banana_message_text = banana_message + @logger.warn("#{err_header} re-raising same error: #{banana_message_text} " + + "-- error count: #{error_count}, error age: #{Time.now.to_i - error_time.to_i}") + exception = get_param(:exception) || RuntimeError + raise exception.new(banana_message_text) + end + + # try to connect server(if connection does not exist) and get response data + begin + request_params[:protocol] ||= (request_params[:port] == 443 ? 'https' : 'http') + + request = request_params[:request] + request['User-Agent'] = get_param(:user_agent) || '' + + # (re)open connection to server if none exists or params has changed + unless @http && + @http.started? && + @server == request_params[:server] && + @port == request_params[:port] && + @protocol == request_params[:protocol] + start(request_params) + end + + # Detect if the body is a streamable object like a file or socket. If so, stream that + # bad boy. + setup_streaming(request) + response = @http.request(request, &block) + + error_reset + eof_reset + return response + + # We treat EOF errors and the timeout/network errors differently. Both + # are tracked in different statistics blocks. Note below that EOF + # errors will sleep for a certain (exponentially increasing) period. + # Other errors don't sleep because there is already an inherent delay + # in them; connect and read timeouts (for example) have already + # 'slept'. It is still not clear which way we should treat errors + # like RST and resolution failures. For now, there is no additional + # delay for these errors although this may change in the future. + + # EOFError means the server closed the connection on us. + rescue EOFError => e + @logger.debug("#{err_header} server #{@server} closed connection") + @http = nil + + # if we have waited long enough - raise an exception... + if raise_on_eof_exception? + exception = get_param(:exception) || RuntimeError + @logger.warn("#{err_header} raising #{exception} due to permanent EOF being received from #{@server}, error age: #{Time.now.to_i - eof_time.to_i}") + raise exception.new("Permanent EOF is being received from #{@server}.") + else + # ... else just sleep a bit before new retry + sleep(add_eof) + # We will be retrying the request, so reset the file pointer + reset_fileptr_offset(request, mypos) + end + rescue Exception => e # See comment at bottom for the list of errors seen... + @http = nil + # if ctrl+c is pressed - we have to reraise exception to terminate proggy + if e.is_a?(Interrupt) && !( e.is_a?(Errno::ETIMEDOUT) || e.is_a?(Timeout::Error)) + @logger.debug( "#{err_header} request to server #{@server} interrupted by ctrl-c") + raise + elsif e.is_a?(ArgumentError) && e.message.include?('wrong number of arguments (5 for 4)') + # seems our net_fix patch was overriden... + exception = get_param(:exception) || RuntimeError + raise exception.new('incompatible Net::HTTP monkey-patch') + end + # oops - we got a banana: log it + error_add(e.message) + @logger.warn("#{err_header} request failure count: #{error_count}, exception: #{e.inspect}") + + # We will be retrying the request, so reset the file pointer + reset_fileptr_offset(request, mypos) + + end + end + end + + def finish(reason = '') + if @http && @http.started? + reason = ", reason: '#{reason}'" unless reason.blank? 
+ @logger.info("Closing #{@http.use_ssl? ? 'HTTPS' : 'HTTP'} connection to #{@http.address}:#{@http.port}#{reason}") + @http.finish + end + end + + # Errors received during testing: + # + # # + # # + # # + # # + # # + # # + # # + end + +end + diff --git a/vendor/bundle/ruby/3.2.0/gems/fakeweb-1.3.0/test/vendor/right_http_connection-1.2.4/setup.rb b/vendor/bundle/ruby/3.2.0/gems/fakeweb-1.3.0/test/vendor/right_http_connection-1.2.4/setup.rb new file mode 100644 index 000000000..424a5f37c --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/fakeweb-1.3.0/test/vendor/right_http_connection-1.2.4/setup.rb @@ -0,0 +1,1585 @@ +# +# setup.rb +# +# Copyright (c) 2000-2005 Minero Aoki +# +# This program is free software. +# You can distribute/modify this program under the terms of +# the GNU LGPL, Lesser General Public License version 2.1. +# + +unless Enumerable.method_defined?(:map) # Ruby 1.4.6 + module Enumerable + alias map collect + end +end + +unless File.respond_to?(:read) # Ruby 1.6 + def File.read(fname) + open(fname) {|f| + return f.read + } + end +end + +unless Errno.const_defined?(:ENOTEMPTY) # Windows? + module Errno + class ENOTEMPTY + # We do not raise this exception, implementation is not needed. + end + end +end + +def File.binread(fname) + open(fname, 'rb') {|f| + return f.read + } +end + +# for corrupted Windows' stat(2) +def File.dir?(path) + File.directory?((path[-1,1] == '/') ? path : path + '/') +end + + +class ConfigTable + + include Enumerable + + def initialize(rbconfig) + @rbconfig = rbconfig + @items = [] + @table = {} + # options + @install_prefix = nil + @config_opt = nil + @verbose = true + @no_harm = false + end + + attr_accessor :install_prefix + attr_accessor :config_opt + + attr_writer :verbose + + def verbose? + @verbose + end + + attr_writer :no_harm + + def no_harm? + @no_harm + end + + def [](key) + lookup(key).resolve(self) + end + + def []=(key, val) + lookup(key).set val + end + + def names + @items.map {|i| i.name } + end + + def each(&block) + @items.each(&block) + end + + def key?(name) + @table.key?(name) + end + + def lookup(name) + @table[name] or setup_rb_error "no such config item: #{name}" + end + + def add(item) + @items.push item + @table[item.name] = item + end + + def remove(name) + item = lookup(name) + @items.delete_if {|i| i.name == name } + @table.delete_if {|name, i| i.name == name } + item + end + + def load_script(path, inst = nil) + if File.file?(path) + MetaConfigEnvironment.new(self, inst).instance_eval File.read(path), path + end + end + + def savefile + '.config' + end + + def load_savefile + begin + File.foreach(savefile()) do |line| + k, v = *line.split(/=/, 2) + self[k] = v.strip + end + rescue Errno::ENOENT + setup_rb_error $!.message + "\n#{File.basename($0)} config first" + end + end + + def save + @items.each {|i| i.value } + File.open(savefile(), 'w') {|f| + @items.each do |i| + f.printf "%s=%s\n", i.name, i.value if i.value? and i.value + end + } + end + + def load_standard_entries + standard_entries(@rbconfig).each do |ent| + add ent + end + end + + def standard_entries(rbconfig) + c = rbconfig + + rubypath = File.join(c['bindir'], c['ruby_install_name'] + c['EXEEXT']) + + major = c['MAJOR'].to_i + minor = c['MINOR'].to_i + teeny = c['TEENY'].to_i + version = "#{major}.#{minor}" + + # ruby ver. >= 1.4.4? 
+ newpath_p = ((major >= 2) or + ((major == 1) and + ((minor >= 5) or + ((minor == 4) and (teeny >= 4))))) + + if c['rubylibdir'] + # V > 1.6.3 + libruby = "#{c['prefix']}/lib/ruby" + librubyver = c['rubylibdir'] + librubyverarch = c['archdir'] + siteruby = c['sitedir'] + siterubyver = c['sitelibdir'] + siterubyverarch = c['sitearchdir'] + elsif newpath_p + # 1.4.4 <= V <= 1.6.3 + libruby = "#{c['prefix']}/lib/ruby" + librubyver = "#{c['prefix']}/lib/ruby/#{version}" + librubyverarch = "#{c['prefix']}/lib/ruby/#{version}/#{c['arch']}" + siteruby = c['sitedir'] + siterubyver = "$siteruby/#{version}" + siterubyverarch = "$siterubyver/#{c['arch']}" + else + # V < 1.4.4 + libruby = "#{c['prefix']}/lib/ruby" + librubyver = "#{c['prefix']}/lib/ruby/#{version}" + librubyverarch = "#{c['prefix']}/lib/ruby/#{version}/#{c['arch']}" + siteruby = "#{c['prefix']}/lib/ruby/#{version}/site_ruby" + siterubyver = siteruby + siterubyverarch = "$siterubyver/#{c['arch']}" + end + parameterize = lambda {|path| + path.sub(/\A#{Regexp.quote(c['prefix'])}/, '$prefix') + } + + if arg = c['configure_args'].split.detect {|arg| /--with-make-prog=/ =~ arg } + makeprog = arg.sub(/'/, '').split(/=/, 2)[1] + else + makeprog = 'make' + end + + [ + ExecItem.new('installdirs', 'std/site/home', + 'std: install under libruby; site: install under site_ruby; home: install under $HOME')\ + {|val, table| + case val + when 'std' + table['rbdir'] = '$librubyver' + table['sodir'] = '$librubyverarch' + when 'site' + table['rbdir'] = '$siterubyver' + table['sodir'] = '$siterubyverarch' + when 'home' + setup_rb_error '$HOME was not set' unless ENV['HOME'] + table['prefix'] = ENV['HOME'] + table['rbdir'] = '$libdir/ruby' + table['sodir'] = '$libdir/ruby' + end + }, + PathItem.new('prefix', 'path', c['prefix'], + 'path prefix of target environment'), + PathItem.new('bindir', 'path', parameterize.call(c['bindir']), + 'the directory for commands'), + PathItem.new('libdir', 'path', parameterize.call(c['libdir']), + 'the directory for libraries'), + PathItem.new('datadir', 'path', parameterize.call(c['datadir']), + 'the directory for shared data'), + PathItem.new('mandir', 'path', parameterize.call(c['mandir']), + 'the directory for man pages'), + PathItem.new('sysconfdir', 'path', parameterize.call(c['sysconfdir']), + 'the directory for system configuration files'), + PathItem.new('localstatedir', 'path', parameterize.call(c['localstatedir']), + 'the directory for local state data'), + PathItem.new('libruby', 'path', libruby, + 'the directory for ruby libraries'), + PathItem.new('librubyver', 'path', librubyver, + 'the directory for standard ruby libraries'), + PathItem.new('librubyverarch', 'path', librubyverarch, + 'the directory for standard ruby extensions'), + PathItem.new('siteruby', 'path', siteruby, + 'the directory for version-independent aux ruby libraries'), + PathItem.new('siterubyver', 'path', siterubyver, + 'the directory for aux ruby libraries'), + PathItem.new('siterubyverarch', 'path', siterubyverarch, + 'the directory for aux ruby binaries'), + PathItem.new('rbdir', 'path', '$siterubyver', + 'the directory for ruby scripts'), + PathItem.new('sodir', 'path', '$siterubyverarch', + 'the directory for ruby extentions'), + PathItem.new('rubypath', 'path', rubypath, + 'the path to set to #! 
line'), + ProgramItem.new('rubyprog', 'name', rubypath, + 'the ruby program using for installation'), + ProgramItem.new('makeprog', 'name', makeprog, + 'the make program to compile ruby extentions'), + SelectItem.new('shebang', 'all/ruby/never', 'ruby', + 'shebang line (#!) editing mode'), + BoolItem.new('without-ext', 'yes/no', 'no', + 'does not compile/install ruby extentions') + ] + end + private :standard_entries + + def load_multipackage_entries + multipackage_entries().each do |ent| + add ent + end + end + + def multipackage_entries + [ + PackageSelectionItem.new('with', 'name,name...', '', 'ALL', + 'package names that you want to install'), + PackageSelectionItem.new('without', 'name,name...', '', 'NONE', + 'package names that you do not want to install') + ] + end + private :multipackage_entries + + ALIASES = { + 'std-ruby' => 'librubyver', + 'stdruby' => 'librubyver', + 'rubylibdir' => 'librubyver', + 'archdir' => 'librubyverarch', + 'site-ruby-common' => 'siteruby', # For backward compatibility + 'site-ruby' => 'siterubyver', # For backward compatibility + 'bin-dir' => 'bindir', + 'bin-dir' => 'bindir', + 'rb-dir' => 'rbdir', + 'so-dir' => 'sodir', + 'data-dir' => 'datadir', + 'ruby-path' => 'rubypath', + 'ruby-prog' => 'rubyprog', + 'ruby' => 'rubyprog', + 'make-prog' => 'makeprog', + 'make' => 'makeprog' + } + + def fixup + ALIASES.each do |ali, name| + @table[ali] = @table[name] + end + @items.freeze + @table.freeze + @options_re = /\A--(#{@table.keys.join('|')})(?:=(.*))?\z/ + end + + def parse_opt(opt) + m = @options_re.match(opt) or setup_rb_error "config: unknown option #{opt}" + m.to_a[1,2] + end + + def dllext + @rbconfig['DLEXT'] + end + + def value_config?(name) + lookup(name).value? + end + + class Item + def initialize(name, template, default, desc) + @name = name.freeze + @template = template + @value = default + @default = default + @description = desc + end + + attr_reader :name + attr_reader :description + + attr_accessor :default + alias help_default default + + def help_opt + "--#{@name}=#{@template}" + end + + def value? + true + end + + def value + @value + end + + def resolve(table) + @value.gsub(%r<\$([^/]+)>) { table[$1] } + end + + def set(val) + @value = check(val) + end + + private + + def check(val) + setup_rb_error "config: --#{name} requires argument" unless val + val + end + end + + class BoolItem < Item + def config_type + 'bool' + end + + def help_opt + "--#{@name}" + end + + private + + def check(val) + return 'yes' unless val + case val + when /\Ay(es)?\z/i, /\At(rue)?\z/i then 'yes' + when /\An(o)?\z/i, /\Af(alse)\z/i then 'no' + else + setup_rb_error "config: --#{@name} accepts only yes/no for argument" + end + end + end + + class PathItem < Item + def config_type + 'path' + end + + private + + def check(path) + setup_rb_error "config: --#{@name} requires argument" unless path + path[0,1] == '$' ? path : File.expand_path(path) + end + end + + class ProgramItem < Item + def config_type + 'program' + end + end + + class SelectItem < Item + def initialize(name, selection, default, desc) + super + @ok = selection.split('/') + end + + def config_type + 'select' + end + + private + + def check(val) + unless @ok.include?(val.strip) + setup_rb_error "config: use --#{@name}=#{@template} (#{val})" + end + val.strip + end + end + + class ExecItem < Item + def initialize(name, selection, desc, &block) + super name, selection, nil, desc + @ok = selection.split('/') + @action = block + end + + def config_type + 'exec' + end + + def value? 
+ false + end + + def resolve(table) + setup_rb_error "$#{name()} wrongly used as option value" + end + + undef set + + def evaluate(val, table) + v = val.strip.downcase + unless @ok.include?(v) + setup_rb_error "invalid option --#{@name}=#{val} (use #{@template})" + end + @action.call v, table + end + end + + class PackageSelectionItem < Item + def initialize(name, template, default, help_default, desc) + super name, template, default, desc + @help_default = help_default + end + + attr_reader :help_default + + def config_type + 'package' + end + + private + + def check(val) + unless File.dir?("packages/#{val}") + setup_rb_error "config: no such package: #{val}" + end + val + end + end + + class MetaConfigEnvironment + def initialize(config, installer) + @config = config + @installer = installer + end + + def config_names + @config.names + end + + def config?(name) + @config.key?(name) + end + + def bool_config?(name) + @config.lookup(name).config_type == 'bool' + end + + def path_config?(name) + @config.lookup(name).config_type == 'path' + end + + def value_config?(name) + @config.lookup(name).config_type != 'exec' + end + + def add_config(item) + @config.add item + end + + def add_bool_config(name, default, desc) + @config.add BoolItem.new(name, 'yes/no', default ? 'yes' : 'no', desc) + end + + def add_path_config(name, default, desc) + @config.add PathItem.new(name, 'path', default, desc) + end + + def set_config_default(name, default) + @config.lookup(name).default = default + end + + def remove_config(name) + @config.remove(name) + end + + # For only multipackage + def packages + raise '[setup.rb fatal] multi-package metaconfig API packages() called for single-package; contact application package vendor' unless @installer + @installer.packages + end + + # For only multipackage + def declare_packages(list) + raise '[setup.rb fatal] multi-package metaconfig API declare_packages() called for single-package; contact application package vendor' unless @installer + @installer.packages = list + end + end + +end # class ConfigTable + + +# This module requires: #verbose?, #no_harm? +module FileOperations + + def mkdir_p(dirname, prefix = nil) + dirname = prefix + File.expand_path(dirname) if prefix + $stderr.puts "mkdir -p #{dirname}" if verbose? + return if no_harm? + + # Does not check '/', it's too abnormal. + dirs = File.expand_path(dirname).split(%r<(?=/)>) + if /\A[a-z]:\z/i =~ dirs[0] + disk = dirs.shift + dirs[0] = disk + dirs[0] + end + dirs.each_index do |idx| + path = dirs[0..idx].join('') + Dir.mkdir path unless File.dir?(path) + end + end + + def rm_f(path) + $stderr.puts "rm -f #{path}" if verbose? + return if no_harm? + force_remove_file path + end + + def rm_rf(path) + $stderr.puts "rm -rf #{path}" if verbose? + return if no_harm? + remove_tree path + end + + def remove_tree(path) + if File.symlink?(path) + remove_file path + elsif File.dir?(path) + remove_tree0 path + else + force_remove_file path + end + end + + def remove_tree0(path) + Dir.foreach(path) do |ent| + next if ent == '.' + next if ent == '..' 
+ entpath = "#{path}/#{ent}" + if File.symlink?(entpath) + remove_file entpath + elsif File.dir?(entpath) + remove_tree0 entpath + else + force_remove_file entpath + end + end + begin + Dir.rmdir path + rescue Errno::ENOTEMPTY + # directory may not be empty + end + end + + def move_file(src, dest) + force_remove_file dest + begin + File.rename src, dest + rescue + File.open(dest, 'wb') {|f| + f.write File.binread(src) + } + File.chmod File.stat(src).mode, dest + File.unlink src + end + end + + def force_remove_file(path) + begin + remove_file path + rescue + end + end + + def remove_file(path) + File.chmod 0777, path + File.unlink path + end + + def install(from, dest, mode, prefix = nil) + $stderr.puts "install #{from} #{dest}" if verbose? + return if no_harm? + + realdest = prefix ? prefix + File.expand_path(dest) : dest + realdest = File.join(realdest, File.basename(from)) if File.dir?(realdest) + str = File.binread(from) + if diff?(str, realdest) + verbose_off { + rm_f realdest if File.exist?(realdest) + } + File.open(realdest, 'wb') {|f| + f.write str + } + File.chmod mode, realdest + + File.open("#{objdir_root()}/InstalledFiles", 'a') {|f| + if prefix + f.puts realdest.sub(prefix, '') + else + f.puts realdest + end + } + end + end + + def diff?(new_content, path) + return true unless File.exist?(path) + new_content != File.binread(path) + end + + def command(*args) + $stderr.puts args.join(' ') if verbose? + system(*args) or raise RuntimeError, + "system(#{args.map{|a| a.inspect }.join(' ')}) failed" + end + + def ruby(*args) + command config('rubyprog'), *args + end + + def make(task = nil) + command(*[config('makeprog'), task].compact) + end + + def extdir?(dir) + File.exist?("#{dir}/MANIFEST") or File.exist?("#{dir}/extconf.rb") + end + + def files_of(dir) + Dir.open(dir) {|d| + return d.select {|ent| File.file?("#{dir}/#{ent}") } + } + end + + DIR_REJECT = %w( . .. CVS SCCS RCS CVS.adm .svn ) + + def directories_of(dir) + Dir.open(dir) {|d| + return d.select {|ent| File.dir?("#{dir}/#{ent}") } - DIR_REJECT + } + end + +end + + +# This module requires: #srcdir_root, #objdir_root, #relpath +module HookScriptAPI + + def get_config(key) + @config[key] + end + + alias config get_config + + # obsolete: use metaconfig to change configuration + def set_config(key, val) + @config[key] = val + end + + # + # srcdir/objdir (works only in the package directory) + # + + def curr_srcdir + "#{srcdir_root()}/#{relpath()}" + end + + def curr_objdir + "#{objdir_root()}/#{relpath()}" + end + + def srcfile(path) + "#{curr_srcdir()}/#{path}" + end + + def srcexist?(path) + File.exist?(srcfile(path)) + end + + def srcdirectory?(path) + File.dir?(srcfile(path)) + end + + def srcfile?(path) + File.file?(srcfile(path)) + end + + def srcentries(path = '.') + Dir.open("#{curr_srcdir()}/#{path}") {|d| + return d.to_a - %w(. ..) 
+ } + end + + def srcfiles(path = '.') + srcentries(path).select {|fname| + File.file?(File.join(curr_srcdir(), path, fname)) + } + end + + def srcdirectories(path = '.') + srcentries(path).select {|fname| + File.dir?(File.join(curr_srcdir(), path, fname)) + } + end + +end + + +class ToplevelInstaller + + Version = '3.4.1' + Copyright = 'Copyright (c) 2000-2005 Minero Aoki' + + TASKS = [ + [ 'all', 'do config, setup, then install' ], + [ 'config', 'saves your configurations' ], + [ 'show', 'shows current configuration' ], + [ 'setup', 'compiles ruby extentions and others' ], + [ 'install', 'installs files' ], + [ 'test', 'run all tests in test/' ], + [ 'clean', "does `make clean' for each extention" ], + [ 'distclean',"does `make distclean' for each extention" ] + ] + + def ToplevelInstaller.invoke + config = ConfigTable.new(load_rbconfig()) + config.load_standard_entries + config.load_multipackage_entries if multipackage? + config.fixup + klass = (multipackage?() ? ToplevelInstallerMulti : ToplevelInstaller) + klass.new(File.dirname($0), config).invoke + end + + def ToplevelInstaller.multipackage? + File.dir?(File.dirname($0) + '/packages') + end + + def ToplevelInstaller.load_rbconfig + if arg = ARGV.detect {|arg| /\A--rbconfig=/ =~ arg } + ARGV.delete(arg) + load File.expand_path(arg.split(/=/, 2)[1]) + $".push 'rbconfig.rb' + else + require 'rbconfig' + end + ::Config::CONFIG + end + + def initialize(ardir_root, config) + @ardir = File.expand_path(ardir_root) + @config = config + # cache + @valid_task_re = nil + end + + def config(key) + @config[key] + end + + def inspect + "#<#{self.class} #{__id__()}>" + end + + def invoke + run_metaconfigs + case task = parsearg_global() + when nil, 'all' + parsearg_config + init_installers + exec_config + exec_setup + exec_install + else + case task + when 'config', 'test' + ; + when 'clean', 'distclean' + @config.load_savefile if File.exist?(@config.savefile) + else + @config.load_savefile + end + __send__ "parsearg_#{task}" + init_installers + __send__ "exec_#{task}" + end + end + + def run_metaconfigs + @config.load_script "#{@ardir}/metaconfig" + end + + def init_installers + @installer = Installer.new(@config, @ardir, File.expand_path('.')) + end + + # + # Hook Script API bases + # + + def srcdir_root + @ardir + end + + def objdir_root + '.' + end + + def relpath + '.' + end + + # + # Option Parsing + # + + def parsearg_global + while arg = ARGV.shift + case arg + when /\A\w+\z/ + setup_rb_error "invalid task: #{arg}" unless valid_task?(arg) + return arg + when '-q', '--quiet' + @config.verbose = false + when '--verbose' + @config.verbose = true + when '--help' + print_usage $stdout + exit 0 + when '--version' + puts "#{File.basename($0)} version #{Version}" + exit 0 + when '--copyright' + puts Copyright + exit 0 + else + setup_rb_error "unknown global option '#{arg}'" + end + end + nil + end + + def valid_task?(t) + valid_task_re() =~ t + end + + def valid_task_re + @valid_task_re ||= /\A(?:#{TASKS.map {|task,desc| task }.join('|')})\z/ + end + + def parsearg_no_options + unless ARGV.empty? 
+ task = caller(0).first.slice(%r<`parsearg_(\w+)'>, 1) + setup_rb_error "#{task}: unknown options: #{ARGV.join(' ')}" + end + end + + alias parsearg_show parsearg_no_options + alias parsearg_setup parsearg_no_options + alias parsearg_test parsearg_no_options + alias parsearg_clean parsearg_no_options + alias parsearg_distclean parsearg_no_options + + def parsearg_config + evalopt = [] + set = [] + @config.config_opt = [] + while i = ARGV.shift + if /\A--?\z/ =~ i + @config.config_opt = ARGV.dup + break + end + name, value = *@config.parse_opt(i) + if @config.value_config?(name) + @config[name] = value + else + evalopt.push [name, value] + end + set.push name + end + evalopt.each do |name, value| + @config.lookup(name).evaluate value, @config + end + # Check if configuration is valid + set.each do |n| + @config[n] if @config.value_config?(n) + end + end + + def parsearg_install + @config.no_harm = false + @config.install_prefix = '' + while a = ARGV.shift + case a + when '--no-harm' + @config.no_harm = true + when /\A--prefix=/ + path = a.split(/=/, 2)[1] + path = File.expand_path(path) unless path[0,1] == '/' + @config.install_prefix = path + else + setup_rb_error "install: unknown option #{a}" + end + end + end + + def print_usage(out) + out.puts 'Typical Installation Procedure:' + out.puts " $ ruby #{File.basename $0} config" + out.puts " $ ruby #{File.basename $0} setup" + out.puts " # ruby #{File.basename $0} install (may require root privilege)" + out.puts + out.puts 'Detailed Usage:' + out.puts " ruby #{File.basename $0} " + out.puts " ruby #{File.basename $0} [] []" + + fmt = " %-24s %s\n" + out.puts + out.puts 'Global options:' + out.printf fmt, '-q,--quiet', 'suppress message outputs' + out.printf fmt, ' --verbose', 'output messages verbosely' + out.printf fmt, ' --help', 'print this message' + out.printf fmt, ' --version', 'print version and quit' + out.printf fmt, ' --copyright', 'print copyright and quit' + out.puts + out.puts 'Tasks:' + TASKS.each do |name, desc| + out.printf fmt, name, desc + end + + fmt = " %-24s %s [%s]\n" + out.puts + out.puts 'Options for CONFIG or ALL:' + @config.each do |item| + out.printf fmt, item.help_opt, item.description, item.help_default + end + out.printf fmt, '--rbconfig=path', 'rbconfig.rb to load',"running ruby's" + out.puts + out.puts 'Options for INSTALL:' + out.printf fmt, '--no-harm', 'only display what to do if given', 'off' + out.printf fmt, '--prefix=path', 'install path prefix', '' + out.puts + end + + # + # Task Handlers + # + + def exec_config + @installer.exec_config + @config.save # must be final + end + + def exec_setup + @installer.exec_setup + end + + def exec_install + @installer.exec_install + end + + def exec_test + @installer.exec_test + end + + def exec_show + @config.each do |i| + printf "%-20s %s\n", i.name, i.value if i.value? + end + end + + def exec_clean + @installer.exec_clean + end + + def exec_distclean + @installer.exec_distclean + end + +end # class ToplevelInstaller + + +class ToplevelInstallerMulti < ToplevelInstaller + + include FileOperations + + def initialize(ardir_root, config) + super + @packages = directories_of("#{@ardir}/packages") + raise 'no package exists' if @packages.empty? 
+ @root_installer = Installer.new(@config, @ardir, File.expand_path('.')) + end + + def run_metaconfigs + @config.load_script "#{@ardir}/metaconfig", self + @packages.each do |name| + @config.load_script "#{@ardir}/packages/#{name}/metaconfig" + end + end + + attr_reader :packages + + def packages=(list) + raise 'package list is empty' if list.empty? + list.each do |name| + raise "directory packages/#{name} does not exist"\ + unless File.dir?("#{@ardir}/packages/#{name}") + end + @packages = list + end + + def init_installers + @installers = {} + @packages.each do |pack| + @installers[pack] = Installer.new(@config, + "#{@ardir}/packages/#{pack}", + "packages/#{pack}") + end + with = extract_selection(config('with')) + without = extract_selection(config('without')) + @selected = @installers.keys.select {|name| + (with.empty? or with.include?(name)) \ + and not without.include?(name) + } + end + + def extract_selection(list) + a = list.split(/,/) + a.each do |name| + setup_rb_error "no such package: #{name}" unless @installers.key?(name) + end + a + end + + def print_usage(f) + super + f.puts 'Inluded packages:' + f.puts ' ' + @packages.sort.join(' ') + f.puts + end + + # + # Task Handlers + # + + def exec_config + run_hook 'pre-config' + each_selected_installers {|inst| inst.exec_config } + run_hook 'post-config' + @config.save # must be final + end + + def exec_setup + run_hook 'pre-setup' + each_selected_installers {|inst| inst.exec_setup } + run_hook 'post-setup' + end + + def exec_install + run_hook 'pre-install' + each_selected_installers {|inst| inst.exec_install } + run_hook 'post-install' + end + + def exec_test + run_hook 'pre-test' + each_selected_installers {|inst| inst.exec_test } + run_hook 'post-test' + end + + def exec_clean + rm_f @config.savefile + run_hook 'pre-clean' + each_selected_installers {|inst| inst.exec_clean } + run_hook 'post-clean' + end + + def exec_distclean + rm_f @config.savefile + run_hook 'pre-distclean' + each_selected_installers {|inst| inst.exec_distclean } + run_hook 'post-distclean' + end + + # + # lib + # + + def each_selected_installers + Dir.mkdir 'packages' unless File.dir?('packages') + @selected.each do |pack| + $stderr.puts "Processing the package `#{pack}' ..." if verbose? + Dir.mkdir "packages/#{pack}" unless File.dir?("packages/#{pack}") + Dir.chdir "packages/#{pack}" + yield @installers[pack] + Dir.chdir '../..' + end + end + + def run_hook(id) + @root_installer.run_hook id + end + + # module FileOperations requires this + def verbose? + @config.verbose? + end + + # module FileOperations requires this + def no_harm? + @config.no_harm? + end + +end # class ToplevelInstallerMulti + + +class Installer + + FILETYPES = %w( bin lib ext data conf man ) + + include FileOperations + include HookScriptAPI + + def initialize(config, srcroot, objroot) + @config = config + @srcdir = File.expand_path(srcroot) + @objdir = File.expand_path(objroot) + @currdir = '.' + end + + def inspect + "#<#{self.class} #{File.basename(@srcdir)}>" + end + + def noop(rel) + end + + # + # Hook Script API base methods + # + + def srcdir_root + @srcdir + end + + def objdir_root + @objdir + end + + def relpath + @currdir + end + + # + # Config Access + # + + # module FileOperations requires this + def verbose? + @config.verbose? + end + + # module FileOperations requires this + def no_harm? + @config.no_harm? 
+ end + + def verbose_off + begin + save, @config.verbose = @config.verbose?, false + yield + ensure + @config.verbose = save + end + end + + # + # TASK config + # + + def exec_config + exec_task_traverse 'config' + end + + alias config_dir_bin noop + alias config_dir_lib noop + + def config_dir_ext(rel) + extconf if extdir?(curr_srcdir()) + end + + alias config_dir_data noop + alias config_dir_conf noop + alias config_dir_man noop + + def extconf + ruby "#{curr_srcdir()}/extconf.rb", *@config.config_opt + end + + # + # TASK setup + # + + def exec_setup + exec_task_traverse 'setup' + end + + def setup_dir_bin(rel) + files_of(curr_srcdir()).each do |fname| + update_shebang_line "#{curr_srcdir()}/#{fname}" + end + end + + alias setup_dir_lib noop + + def setup_dir_ext(rel) + make if extdir?(curr_srcdir()) + end + + alias setup_dir_data noop + alias setup_dir_conf noop + alias setup_dir_man noop + + def update_shebang_line(path) + return if no_harm? + return if config('shebang') == 'never' + old = Shebang.load(path) + if old + $stderr.puts "warning: #{path}: Shebang line includes too many args. It is not portable and your program may not work." if old.args.size > 1 + new = new_shebang(old) + return if new.to_s == old.to_s + else + return unless config('shebang') == 'all' + new = Shebang.new(config('rubypath')) + end + $stderr.puts "updating shebang: #{File.basename(path)}" if verbose? + open_atomic_writer(path) {|output| + File.open(path, 'rb') {|f| + f.gets if old # discard + output.puts new.to_s + output.print f.read + } + } + end + + def new_shebang(old) + if /\Aruby/ =~ File.basename(old.cmd) + Shebang.new(config('rubypath'), old.args) + elsif File.basename(old.cmd) == 'env' and old.args.first == 'ruby' + Shebang.new(config('rubypath'), old.args[1..-1]) + else + return old unless config('shebang') == 'all' + Shebang.new(config('rubypath')) + end + end + + def open_atomic_writer(path, &block) + tmpfile = File.basename(path) + '.tmp' + begin + File.open(tmpfile, 'wb', &block) + File.rename tmpfile, File.basename(path) + ensure + File.unlink tmpfile if File.exist?(tmpfile) + end + end + + class Shebang + def Shebang.load(path) + line = nil + File.open(path) {|f| + line = f.gets + } + return nil unless /\A#!/ =~ line + parse(line) + end + + def Shebang.parse(line) + cmd, *args = *line.strip.sub(/\A\#!/, '').split(' ') + new(cmd, args) + end + + def initialize(cmd, args = []) + @cmd = cmd + @args = args + end + + attr_reader :cmd + attr_reader :args + + def to_s + "#! #{@cmd}" + (@args.empty? ? 
'' : " #{@args.join(' ')}") + end + end + + # + # TASK install + # + + def exec_install + rm_f 'InstalledFiles' + exec_task_traverse 'install' + end + + def install_dir_bin(rel) + install_files targetfiles(), "#{config('bindir')}/#{rel}", 0755 + end + + def install_dir_lib(rel) + install_files libfiles(), "#{config('rbdir')}/#{rel}", 0644 + end + + def install_dir_ext(rel) + return unless extdir?(curr_srcdir()) + install_files rubyextentions('.'), + "#{config('sodir')}/#{File.dirname(rel)}", + 0555 + end + + def install_dir_data(rel) + install_files targetfiles(), "#{config('datadir')}/#{rel}", 0644 + end + + def install_dir_conf(rel) + # FIXME: should not remove current config files + # (rename previous file to .old/.org) + install_files targetfiles(), "#{config('sysconfdir')}/#{rel}", 0644 + end + + def install_dir_man(rel) + install_files targetfiles(), "#{config('mandir')}/#{rel}", 0644 + end + + def install_files(list, dest, mode) + mkdir_p dest, @config.install_prefix + list.each do |fname| + install fname, dest, mode, @config.install_prefix + end + end + + def libfiles + glob_reject(%w(*.y *.output), targetfiles()) + end + + def rubyextentions(dir) + ents = glob_select("*.#{@config.dllext}", targetfiles()) + if ents.empty? + setup_rb_error "no ruby extention exists: 'ruby #{$0} setup' first" + end + ents + end + + def targetfiles + mapdir(existfiles() - hookfiles()) + end + + def mapdir(ents) + ents.map {|ent| + if File.exist?(ent) + then ent # objdir + else "#{curr_srcdir()}/#{ent}" # srcdir + end + } + end + + # picked up many entries from cvs-1.11.1/src/ignore.c + JUNK_FILES = %w( + core RCSLOG tags TAGS .make.state + .nse_depinfo #* .#* cvslog.* ,* .del-* *.olb + *~ *.old *.bak *.BAK *.orig *.rej _$* *$ + + *.org *.in .* + ) + + def existfiles + glob_reject(JUNK_FILES, (files_of(curr_srcdir()) | files_of('.'))) + end + + def hookfiles + %w( pre-%s post-%s pre-%s.rb post-%s.rb ).map {|fmt| + %w( config setup install clean ).map {|t| sprintf(fmt, t) } + }.flatten + end + + def glob_select(pat, ents) + re = globs2re([pat]) + ents.select {|ent| re =~ ent } + end + + def glob_reject(pats, ents) + re = globs2re(pats) + ents.reject {|ent| re =~ ent } + end + + GLOB2REGEX = { + '.' => '\.', + '$' => '\$', + '#' => '\#', + '*' => '.*' + } + + def globs2re(pats) + /\A(?:#{ + pats.map {|pat| pat.gsub(/[\.\$\#\*]/) {|ch| GLOB2REGEX[ch] } }.join('|') + })\z/ + end + + # + # TASK test + # + + TESTDIR = 'test' + + def exec_test + unless File.directory?('test') + $stderr.puts 'no test in this package' if verbose? + return + end + $stderr.puts 'Running tests...' if verbose? + begin + require 'test/unit' + rescue LoadError + setup_rb_error 'test/unit cannot loaded. You need Ruby 1.8 or later to invoke this task.' 
+ end + runner = Test::Unit::AutoRunner.new(true) + runner.to_run << TESTDIR + runner.run + end + + # + # TASK clean + # + + def exec_clean + exec_task_traverse 'clean' + rm_f @config.savefile + rm_f 'InstalledFiles' + end + + alias clean_dir_bin noop + alias clean_dir_lib noop + alias clean_dir_data noop + alias clean_dir_conf noop + alias clean_dir_man noop + + def clean_dir_ext(rel) + return unless extdir?(curr_srcdir()) + make 'clean' if File.file?('Makefile') + end + + # + # TASK distclean + # + + def exec_distclean + exec_task_traverse 'distclean' + rm_f @config.savefile + rm_f 'InstalledFiles' + end + + alias distclean_dir_bin noop + alias distclean_dir_lib noop + + def distclean_dir_ext(rel) + return unless extdir?(curr_srcdir()) + make 'distclean' if File.file?('Makefile') + end + + alias distclean_dir_data noop + alias distclean_dir_conf noop + alias distclean_dir_man noop + + # + # Traversing + # + + def exec_task_traverse(task) + run_hook "pre-#{task}" + FILETYPES.each do |type| + if type == 'ext' and config('without-ext') == 'yes' + $stderr.puts 'skipping ext/* by user option' if verbose? + next + end + traverse task, type, "#{task}_dir_#{type}" + end + run_hook "post-#{task}" + end + + def traverse(task, rel, mid) + dive_into(rel) { + run_hook "pre-#{task}" + __send__ mid, rel.sub(%r[\A.*?(?:/|\z)], '') + directories_of(curr_srcdir()).each do |d| + traverse task, "#{rel}/#{d}", mid + end + run_hook "post-#{task}" + } + end + + def dive_into(rel) + return unless File.dir?("#{@srcdir}/#{rel}") + + dir = File.basename(rel) + Dir.mkdir dir unless File.dir?(dir) + prevdir = Dir.pwd + Dir.chdir dir + $stderr.puts '---> ' + rel if verbose? + @currdir = rel + yield + Dir.chdir prevdir + $stderr.puts '<--- ' + rel if verbose? + @currdir = File.dirname(rel) + end + + def run_hook(id) + path = [ "#{curr_srcdir()}/#{id}", + "#{curr_srcdir()}/#{id}.rb" ].detect {|cand| File.file?(cand) } + return unless path + begin + instance_eval File.read(path), path, 1 + rescue + raise if $DEBUG + setup_rb_error "hook #{path} failed:\n" + $!.message + end + end + +end # class Installer + + +class SetupError < StandardError; end + +def setup_rb_error(msg) + raise SetupError, msg +end + +if $0 == __FILE__ + begin + ToplevelInstaller.invoke + rescue SetupError + raise if $DEBUG + $stderr.puts $!.message + $stderr.puts "Try 'ruby #{$0} --help' for detailed usage." 
+ exit 1 + end +end diff --git a/vendor/bundle/ruby/3.2.0/gems/fakeweb-1.3.0/test/vendor/samuel-0.2.1/.document b/vendor/bundle/ruby/3.2.0/gems/fakeweb-1.3.0/test/vendor/samuel-0.2.1/.document new file mode 100644 index 000000000..ecf367319 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/fakeweb-1.3.0/test/vendor/samuel-0.2.1/.document @@ -0,0 +1,5 @@ +README.rdoc +lib/**/*.rb +bin/* +features/**/*.feature +LICENSE diff --git a/vendor/bundle/ruby/3.2.0/gems/fakeweb-1.3.0/test/vendor/samuel-0.2.1/.gitignore b/vendor/bundle/ruby/3.2.0/gems/fakeweb-1.3.0/test/vendor/samuel-0.2.1/.gitignore new file mode 100644 index 000000000..482f92bf7 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/fakeweb-1.3.0/test/vendor/samuel-0.2.1/.gitignore @@ -0,0 +1,5 @@ +.DS_Store +.yardoc +/coverage +/doc +/pkg diff --git a/vendor/bundle/ruby/3.2.0/gems/fakeweb-1.3.0/test/vendor/samuel-0.2.1/LICENSE b/vendor/bundle/ruby/3.2.0/gems/fakeweb-1.3.0/test/vendor/samuel-0.2.1/LICENSE new file mode 100644 index 000000000..590bcb6fa --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/fakeweb-1.3.0/test/vendor/samuel-0.2.1/LICENSE @@ -0,0 +1,20 @@ +Copyright 2009 Chris Kampmeier + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +"Software"), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/vendor/bundle/ruby/3.2.0/gems/fakeweb-1.3.0/test/vendor/samuel-0.2.1/README.rdoc b/vendor/bundle/ruby/3.2.0/gems/fakeweb-1.3.0/test/vendor/samuel-0.2.1/README.rdoc new file mode 100644 index 000000000..cfab5434f --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/fakeweb-1.3.0/test/vendor/samuel-0.2.1/README.rdoc @@ -0,0 +1,70 @@ += Samuel + +Samuel is a gem for automatic logging of your Net::HTTP requests. It's named for +the serial diarist Mr. Pepys, who was known to reliably record events both +quotidian and remarkable. + +Should a Great Plague, Fire, or Whale befall an important external web service +you use, you'll be sure to have a tidy record of it. + +== Usage: + +When Rails is loaded, Samuel configures a few things automatically. So all you +need to do is this: + + # config/environment.rb + config.gem "samuel" + +And Samuel will automatically use Rails's logger and an ActiveRecord-like format. + +For non-Rails projects, you'll have to manually configure logging, like this: + + require 'samuel' + Samuel.logger = Logger.new('http_requests.log') + +If you don't assign a logger, Samuel will configure a default logger on +STDOUT+. + +== Configuration + +There are two ways to specify configuration options for Samuel: global and +inline. 
Global configs look like this: + + Samuel.config[:labels] = {"example.com" => "Example API"} + Samuel.config[:filtered_params] = :password + +You should put global configuration somewhere early-on in your program. If +you're using Rails, config/initializers/samuel.rb will do the trick. + +Alternatively, an inline configuration block temporarily overrides any global +configuration for a set of HTTP requests: + + Samuel.with_config :label => "Twitter API" do + Net::HTTP.start("twitter.com") { |http| http.get("/help/test") } + end + +Right now, there are three configuration changes you can make in either style: + +* +:labels+ - This is a hash with domain substrings as keys and log labels as + values. If a request domain includes one of the domain substrings, the + corresponding label will be used for the first part of that log entry. By + default this is set to \{"" => "HTTP"}, so that all requests are + labeled with "HTTP Request". +* +:label+ - As an alternative to the +:labels+ hash, this is simply a string. + If set, it takes precedence over any +:labels+ (by default, it's not set). It + gets "Request" appended to it as well -- so if you want your log to + always say +Twitter API Request+ instead of the default +HTTP Request+, you + can set this to "Twitter API". I'd recommend using this setting + globally if you're only making requests to one service, or inline if you just + need to temporarily override the global +:labels+. +* +:filtered_params+ - This works just like Rails's +filter_parameter_logging+ + method. Set it to a symbol, string, or array of them, and Samuel will filter + the value of query parameters that have any of these patterns as a substring + by replacing the value with [FILTERED] in your logs. By default, no + filtering is enabled. + +Samuel logs successful HTTP requests at the +INFO+ level; Failed requests log at +the +WARN+ level. This isn't currently configurable, but it's on the list. + +== License + +Copyright 2009 Chris Kampmeier. See +LICENSE+ for details. diff --git a/vendor/bundle/ruby/3.2.0/gems/fakeweb-1.3.0/test/vendor/samuel-0.2.1/Rakefile b/vendor/bundle/ruby/3.2.0/gems/fakeweb-1.3.0/test/vendor/samuel-0.2.1/Rakefile new file mode 100644 index 000000000..ffbe60384 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/fakeweb-1.3.0/test/vendor/samuel-0.2.1/Rakefile @@ -0,0 +1,62 @@ +require 'rubygems' +require 'rake' + +begin + require 'jeweler' + Jeweler::Tasks.new do |gem| + gem.name = "samuel" + gem.summary = %Q{An automatic logger for HTTP requests in Ruby} + gem.description = %Q{An automatic logger for HTTP requests in Ruby. Adds Net::HTTP request logging to your Rails logs, and more.} + gem.email = "chris@kampers.net" + gem.homepage = "http://github.com/chrisk/samuel" + gem.authors = ["Chris Kampmeier"] + gem.rubyforge_project = "samuel" + gem.add_development_dependency "thoughtbot-shoulda" + gem.add_development_dependency "yard" + gem.add_development_dependency "mocha" + gem.add_development_dependency "fakeweb" + end + Jeweler::GemcutterTasks.new + Jeweler::RubyforgeTasks.new do |rubyforge| + rubyforge.doc_task = "yardoc" + end +rescue LoadError + puts "Jeweler (or a dependency) not available. 
Install it with: sudo gem install jeweler" +end + +require 'rake/testtask' +Rake::TestTask.new(:test) do |test| + test.libs << 'lib' << 'test' + test.pattern = 'test/**/*_test.rb' + test.verbose = false + test.warning = true +end + +begin + require 'rcov/rcovtask' + Rcov::RcovTask.new do |test| + test.libs << 'test' + test.pattern = 'test/**/*_test.rb' + test.rcov_opts << "--sort coverage" + test.rcov_opts << "--exclude gems" + test.verbose = false + test.warning = true + end +rescue LoadError + task :rcov do + abort "RCov is not available. In order to run rcov, you must: sudo gem install spicycode-rcov" + end +end + +task :test => :check_dependencies + +task :default => :test + +begin + require 'yard' + YARD::Rake::YardocTask.new +rescue LoadError + task :yardoc do + abort "YARD is not available. In order to run yardoc, you must: sudo gem install yard" + end +end diff --git a/vendor/bundle/ruby/3.2.0/gems/fakeweb-1.3.0/test/vendor/samuel-0.2.1/VERSION b/vendor/bundle/ruby/3.2.0/gems/fakeweb-1.3.0/test/vendor/samuel-0.2.1/VERSION new file mode 100644 index 000000000..0c62199f1 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/fakeweb-1.3.0/test/vendor/samuel-0.2.1/VERSION @@ -0,0 +1 @@ +0.2.1 diff --git a/vendor/bundle/ruby/3.2.0/gems/fakeweb-1.3.0/test/vendor/samuel-0.2.1/lib/samuel.rb b/vendor/bundle/ruby/3.2.0/gems/fakeweb-1.3.0/test/vendor/samuel-0.2.1/lib/samuel.rb new file mode 100644 index 000000000..5c8fed6f6 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/fakeweb-1.3.0/test/vendor/samuel-0.2.1/lib/samuel.rb @@ -0,0 +1,52 @@ +require "logger" +require "net/http" +require "net/https" +require "benchmark" + +require "samuel/net_http" +require "samuel/request" + + +module Samuel + extend self + + attr_writer :config, :logger + + def logger + @logger = nil if !defined?(@logger) + return @logger if !@logger.nil? + + if defined?(RAILS_DEFAULT_LOGGER) + @logger = RAILS_DEFAULT_LOGGER + else + @logger = Logger.new(STDOUT) + end + end + + def config + Thread.current[:__samuel_config] ? Thread.current[:__samuel_config] : @config + end + + def log_request(http, request, &block) + request = Request.new(http, request, block) + request.perform_and_log! + request.response + end + + def with_config(options = {}) + original_config = config.dup + nested = !Thread.current[:__samuel_config].nil? + + Thread.current[:__samuel_config] = original_config.merge(options) + yield + Thread.current[:__samuel_config] = nested ? 
original_config : nil + end + + def reset_config + Thread.current[:__samuel_config] = nil + @config = {:label => nil, :labels => {"" => "HTTP"}, :filtered_params => []} + end + +end + +Samuel.reset_config diff --git a/vendor/bundle/ruby/3.2.0/gems/fakeweb-1.3.0/test/vendor/samuel-0.2.1/lib/samuel/net_http.rb b/vendor/bundle/ruby/3.2.0/gems/fakeweb-1.3.0/test/vendor/samuel-0.2.1/lib/samuel/net_http.rb new file mode 100644 index 000000000..2ffadf220 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/fakeweb-1.3.0/test/vendor/samuel-0.2.1/lib/samuel/net_http.rb @@ -0,0 +1,10 @@ +class Net::HTTP + + alias request_without_samuel request + def request(req, body = nil, &block) + Samuel.log_request(self, req) do + request_without_samuel(req, body, &block) + end + end + +end diff --git a/vendor/bundle/ruby/3.2.0/gems/fakeweb-1.3.0/test/vendor/samuel-0.2.1/lib/samuel/request.rb b/vendor/bundle/ruby/3.2.0/gems/fakeweb-1.3.0/test/vendor/samuel-0.2.1/lib/samuel/request.rb new file mode 100644 index 000000000..e10ecb44e --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/fakeweb-1.3.0/test/vendor/samuel-0.2.1/lib/samuel/request.rb @@ -0,0 +1,96 @@ +module Samuel + class Request + + attr_accessor :response + + def initialize(http, request, proc) + @http, @request, @proc = http, request, proc + end + + def perform_and_log! + # If an exception is raised in the Benchmark block, it'll interrupt the + # benchmark. Instead, use an inner block to record it as the "response" + # for raising after the benchmark (and logging) is done. + @seconds = Benchmark.realtime do + begin; @response = @proc.call; rescue Exception => @response; end + end + Samuel.logger.add(log_level, log_message) + raise @response if @response.is_a?(Exception) + end + + private + + def log_message + bold = "\e[1m" + blue = "\e[34m" + underline = "\e[4m" + reset = "\e[0m" + " #{bold}#{blue}#{underline}#{label} request (#{milliseconds}ms) " + + "#{response_summary}#{reset} #{method} #{uri}" + end + + def milliseconds + (@seconds * 1000).round + end + + def uri + "#{scheme}://#{@http.address}#{port_if_not_default}#{filtered_path}" + end + + def filtered_path + path_without_query, query = @request.path.split("?") + if query + patterns = [Samuel.config[:filtered_params]].flatten + patterns.map { |pattern| + pattern_for_regex = Regexp.escape(pattern.to_s) + [/([^&]*#{pattern_for_regex}[^&=]*)=(?:[^&]+)/, '\1=[FILTERED]'] + }.each { |filter| query.gsub!(*filter) } + "#{path_without_query}?#{query}" + else + @request.path + end + end + + def scheme + @http.use_ssl? ? "https" : "http" + end + + def port_if_not_default + ssl, port = @http.use_ssl?, @http.port + if (!ssl && port == 80) || (ssl && port == 443) + "" + else + ":#{port}" + end + end + + def method + @request.method.to_s.upcase + end + + def label + return Samuel.config[:label] if Samuel.config[:label] + + pair = Samuel.config[:labels].detect { |domain, label| @http.address.include?(domain) } + pair[1] if pair + end + + def response_summary + if response.is_a?(Exception) + response.class + else + "[#{response.code} #{response.message}]" + end + end + + def log_level + error_classes = [Exception, Net::HTTPClientError, Net::HTTPServerError] + if error_classes.any? 
{ |klass| response.is_a?(klass) } + level = Logger::WARN + else + level = Logger::INFO + end + end + + end +end diff --git a/vendor/bundle/ruby/3.2.0/gems/fakeweb-1.3.0/test/vendor/samuel-0.2.1/samuel.gemspec b/vendor/bundle/ruby/3.2.0/gems/fakeweb-1.3.0/test/vendor/samuel-0.2.1/samuel.gemspec new file mode 100644 index 000000000..3a3719bf8 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/fakeweb-1.3.0/test/vendor/samuel-0.2.1/samuel.gemspec @@ -0,0 +1,69 @@ +# Generated by jeweler +# DO NOT EDIT THIS FILE +# Instead, edit Jeweler::Tasks in Rakefile, and run `rake gemspec` +# -*- encoding: utf-8 -*- + +Gem::Specification.new do |s| + s.name = %q{samuel} + s.version = "0.2.1" + + s.required_rubygems_version = Gem::Requirement.new(">= 0") if s.respond_to? :required_rubygems_version= + s.authors = ["Chris Kampmeier"] + s.date = %q{2009-09-15} + s.description = %q{An automatic logger for HTTP requests in Ruby. Adds Net::HTTP request logging to your Rails logs, and more.} + s.email = %q{chris@kampers.net} + s.extra_rdoc_files = [ + "LICENSE", + "README.rdoc" + ] + s.files = [ + ".document", + ".gitignore", + "LICENSE", + "README.rdoc", + "Rakefile", + "VERSION", + "lib/samuel.rb", + "lib/samuel/net_http.rb", + "lib/samuel/request.rb", + "samuel.gemspec", + "test/request_test.rb", + "test/samuel_test.rb", + "test/test_helper.rb", + "test/thread_test.rb" + ] + s.homepage = %q{http://github.com/chrisk/samuel} + s.rdoc_options = ["--charset=UTF-8"] + s.require_paths = ["lib"] + s.rubyforge_project = %q{samuel} + s.rubygems_version = %q{1.3.5} + s.summary = %q{An automatic logger for HTTP requests in Ruby} + s.test_files = [ + "test/request_test.rb", + "test/samuel_test.rb", + "test/test_helper.rb", + "test/thread_test.rb" + ] + + if s.respond_to? :specification_version then + current_version = Gem::Specification::CURRENT_SPECIFICATION_VERSION + s.specification_version = 3 + + if Gem::Version.new(Gem::RubyGemsVersion) >= Gem::Version.new('1.2.0') then + s.add_development_dependency(%q<thoughtbot-shoulda>, [">= 0"]) + s.add_development_dependency(%q<yard>, [">= 0"]) + s.add_development_dependency(%q<mocha>, [">= 0"]) + s.add_development_dependency(%q<fakeweb>, [">= 0"]) + else + s.add_dependency(%q<thoughtbot-shoulda>, [">= 0"]) + s.add_dependency(%q<yard>, [">= 0"]) + s.add_dependency(%q<mocha>, [">= 0"]) + s.add_dependency(%q<fakeweb>, [">= 0"]) + end + else + s.add_dependency(%q<thoughtbot-shoulda>, [">= 0"]) + s.add_dependency(%q<yard>, [">= 0"]) + s.add_dependency(%q<mocha>, [">= 0"]) + s.add_dependency(%q<fakeweb>, [">= 0"]) + end +end diff --git a/vendor/bundle/ruby/3.2.0/gems/fakeweb-1.3.0/test/vendor/samuel-0.2.1/test/request_test.rb b/vendor/bundle/ruby/3.2.0/gems/fakeweb-1.3.0/test/vendor/samuel-0.2.1/test/request_test.rb new file mode 100644 index 000000000..4e905d1ec --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/fakeweb-1.3.0/test/vendor/samuel-0.2.1/test/request_test.rb @@ -0,0 +1,193 @@ +require 'test_helper' + +class RequestTest < Test::Unit::TestCase + + context "making an HTTP request" do + setup { setup_test_logger + FakeWeb.clean_registry + Samuel.reset_config } + teardown { teardown_test_logger } + + context "to GET http://example.com/test, responding with a 200 in 53ms" do + setup do + FakeWeb.register_uri(:get, "http://example.com/test", :status => [200, "OK"]) + Benchmark.stubs(:realtime).yields.returns(0.053) + open "http://example.com/test" + end + + should_log_lines 1 + should_log_at_level :info + should_log_including "HTTP request" + should_log_including "(53ms)" + should_log_including "[200 OK]" + should_log_including "GET http://example.com/test" + end + + context "on a non-standard port"
do + setup do + FakeWeb.register_uri(:get, "http://example.com:8080/test", :status => [200, "OK"]) + open "http://example.com:8080/test" + end + + should_log_including "GET http://example.com:8080/test" + end + + context "with SSL" do + setup do + FakeWeb.register_uri(:get, "https://example.com/test", :status => [200, "OK"]) + open "https://example.com/test" + end + + should_log_including "HTTP request" + should_log_including "GET https://example.com/test" + end + + context "with SSL on a non-standard port" do + setup do + FakeWeb.register_uri(:get, "https://example.com:80/test", :status => [200, "OK"]) + open "https://example.com:80/test" + end + + should_log_including "HTTP request" + should_log_including "GET https://example.com:80/test" + end + + context "that raises" do + setup do + FakeWeb.register_uri(:get, "http://example.com/test", :exception => Errno::ECONNREFUSED) + begin + Net::HTTP.start("example.com") { |http| http.get("/test") } + rescue Errno::ECONNREFUSED => @exception + end + end + + should_log_at_level :warn + should_log_including "HTTP request" + should_log_including "GET http://example.com/test" + should_log_including "Errno::ECONNREFUSED" + should_log_including %r|\d+ms| + should_raise_exception Errno::ECONNREFUSED + end + + context "that responds with a 500-level code" do + setup do + FakeWeb.register_uri(:get, "http://example.com/test", :status => [502, "Bad Gateway"]) + Net::HTTP.start("example.com") { |http| http.get("/test") } + end + + should_log_at_level :warn + end + + context "that responds with a 400-level code" do + setup do + FakeWeb.register_uri(:get, "http://example.com/test", :status => [404, "Not Found"]) + Net::HTTP.start("example.com") { |http| http.get("/test") } + end + + should_log_at_level :warn + end + + context "inside a configuration block with :label => 'Example'" do + setup do + FakeWeb.register_uri(:get, "http://example.com/test", :status => [200, "OK"]) + Samuel.with_config :label => "Example" do + open "http://example.com/test" + end + end + + should_log_including "Example request" + should_have_config_afterwards_including :labels => {"" => "HTTP"}, + :label => nil + end + + context "inside a configuration block with :filter_params" do + setup do + FakeWeb.register_uri(:get, "http://example.com/test?password=secret&username=chrisk", + :status => [200, "OK"]) + @uri = "http://example.com/test?password=secret&username=chrisk" + end + + context "=> :password" do + setup { Samuel.with_config(:filtered_params => :password) { open @uri } } + should_log_including "http://example.com/test?password=[FILTERED]&username=chrisk" + end + + context "=> :as" do + setup { Samuel.with_config(:filtered_params => :ass) { open @uri } } + should_log_including "http://example.com/test?password=[FILTERED]&username=chrisk" + end + + context "=> ['pass', 'name']" do + setup { Samuel.with_config(:filtered_params => %w(pass name)) { open @uri } } + should_log_including "http://example.com/test?password=[FILTERED]&username=[FILTERED]" + end + end + + context "with a global config including :label => 'Example'" do + setup do + FakeWeb.register_uri(:get, "http://example.com/test", :status => [200, "OK"]) + Samuel.config[:label] = "Example" + open "http://example.com/test" + end + + should_log_including "Example request" + should_have_config_afterwards_including :labels => {"" => "HTTP"}, + :label => "Example" + end + + context "with a global config including :label => 'Example' but inside config block that changes it to 'Example 2'" do + setup do + 
FakeWeb.register_uri(:get, "http://example.com/test", :status => [200, "OK"]) + Samuel.config[:label] = "Example" + Samuel.with_config(:label => "Example 2") { open "http://example.com/test" } + end + + should_log_including "Example 2 request" + should_have_config_afterwards_including :labels => {"" => "HTTP"}, + :label => "Example" + end + + context "inside a config block of :label => 'Example 2' nested inside a config block of :label => 'Example'" do + setup do + FakeWeb.register_uri(:get, "http://example.com/test", :status => [200, "OK"]) + Samuel.with_config :label => "Example" do + Samuel.with_config :label => "Example 2" do + open "http://example.com/test" + end + end + end + + should_log_including "Example 2 request" + should_have_config_afterwards_including :labels => {"" => "HTTP"}, + :label => nil + end + + context "wth a global config including :labels => {'example.com' => 'Example'} but inside a config block of :label => 'Example 3' nested inside a config block of :label => 'Example 2'" do + setup do + FakeWeb.register_uri(:get, "http://example.com/test", :status => [200, "OK"]) + Samuel.config[:labels] = {'example.com' => 'Example'} + Samuel.with_config :label => "Example 2" do + Samuel.with_config :label => "Example 3" do + open "http://example.com/test" + end + end + end + + should_log_including "Example 3 request" + should_have_config_afterwards_including :labels => {'example.com' => 'Example'}, + :label => nil + end + + context "with a global config including :labels => {'example.com' => 'Example API'}" do + setup do + FakeWeb.register_uri(:get, "http://example.com/test", :status => [200, "OK"]) + Samuel.config[:labels] = {'example.com' => 'Example API'} + open "http://example.com/test" + end + + should_log_including "Example API request" + end + + end + +end diff --git a/vendor/bundle/ruby/3.2.0/gems/fakeweb-1.3.0/test/vendor/samuel-0.2.1/test/samuel_test.rb b/vendor/bundle/ruby/3.2.0/gems/fakeweb-1.3.0/test/vendor/samuel-0.2.1/test/samuel_test.rb new file mode 100644 index 000000000..4a3665fa6 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/fakeweb-1.3.0/test/vendor/samuel-0.2.1/test/samuel_test.rb @@ -0,0 +1,42 @@ +require 'test_helper' + +class SamuelTest < Test::Unit::TestCase + + context "logger configuration" do + setup do + Samuel.logger = nil + if Object.const_defined?(:RAILS_DEFAULT_LOGGER) + Object.send(:remove_const, :RAILS_DEFAULT_LOGGER) + end + end + + teardown do + Samuel.logger = nil + end + + context "when Rails's logger is available" do + setup { Object.const_set(:RAILS_DEFAULT_LOGGER, :mock_logger) } + + should "use the same logger" do + assert_equal :mock_logger, Samuel.logger + end + end + + context "when Rails's logger is not available" do + should "use a new Logger instance pointed to STDOUT" do + assert_instance_of Logger, Samuel.logger + assert_equal STDOUT, Samuel.logger.instance_variable_get(:"@logdev").dev + end + end + end + + + context ".reset_config" do + should "reset the config to default vaules" do + Samuel.config = {:foo => "bar"} + Samuel.reset_config + assert_equal({:label => nil, :labels => {"" => "HTTP"}, :filtered_params => []}, Samuel.config) + end + end + +end diff --git a/vendor/bundle/ruby/3.2.0/gems/fakeweb-1.3.0/test/vendor/samuel-0.2.1/test/test_helper.rb b/vendor/bundle/ruby/3.2.0/gems/fakeweb-1.3.0/test/vendor/samuel-0.2.1/test/test_helper.rb new file mode 100644 index 000000000..2862051b9 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/fakeweb-1.3.0/test/vendor/samuel-0.2.1/test/test_helper.rb @@ -0,0 +1,66 @@ +require 
'rubygems' +require 'test/unit' +require 'shoulda' +require 'mocha' +require 'open-uri' +require 'fakeweb' + +FakeWeb.allow_net_connect = false + +$LOAD_PATH.unshift(File.join(File.dirname(__FILE__), '..', 'lib')) +$LOAD_PATH.unshift(File.dirname(__FILE__)) +require 'samuel' + +class Test::Unit::TestCase + TEST_LOG_PATH = File.join(File.dirname(__FILE__), 'test.log') + + def self.should_log_lines(expected_count) + should "log #{expected_count} line#{'s' unless expected_count == 1}" do + lines = File.readlines(TEST_LOG_PATH) + assert_equal expected_count, lines.length + end + end + + def self.should_log_including(what) + should "log a line including #{what.inspect}" do + contents = File.read(TEST_LOG_PATH) + if what.is_a?(Regexp) + assert_match what, contents + else + assert contents.include?(what), + "Expected #{contents.inspect} to include #{what.inspect}" + end + end + end + + def self.should_log_at_level(level) + level = level.to_s.upcase + should "log at the #{level} level" do + assert File.read(TEST_LOG_PATH).include?(" #{level} -- :") + end + end + + def self.should_raise_exception(klass) + should "raise an #{klass} exception" do + assert @exception.is_a?(klass) + end + end + + def self.should_have_config_afterwards_including(config) + config.each_pair do |key, value| + should "continue afterwards with Samuel.config[#{key.inspect}] set to #{value.inspect}" do + assert_equal value, Samuel.config[key] + end + end + end + + def setup_test_logger + FileUtils.rm_rf TEST_LOG_PATH + FileUtils.touch TEST_LOG_PATH + Samuel.logger = Logger.new(TEST_LOG_PATH) + end + + def teardown_test_logger + FileUtils.rm_rf TEST_LOG_PATH + end +end diff --git a/vendor/bundle/ruby/3.2.0/gems/fakeweb-1.3.0/test/vendor/samuel-0.2.1/test/thread_test.rb b/vendor/bundle/ruby/3.2.0/gems/fakeweb-1.3.0/test/vendor/samuel-0.2.1/test/thread_test.rb new file mode 100644 index 000000000..d030cb973 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/fakeweb-1.3.0/test/vendor/samuel-0.2.1/test/thread_test.rb @@ -0,0 +1,32 @@ +require 'test_helper' + +class ThreadTest < Test::Unit::TestCase + + context "when logging multiple requests at once" do + setup do + @log = StringIO.new + Samuel.logger = Logger.new(@log) + FakeWeb.register_uri(:get, /example\.com/, :status => [200, "OK"]) + threads = [] + 5.times do |i| + threads << Thread.new(i) do |n| + Samuel.with_config :label => "Example #{n}" do + Thread.pass + open "http://example.com/#{n}" + end + end + end + threads.each { |t| t.join } + @log.rewind + end + + should "not let configuration blocks interfere with eachother" do + @log.each_line do |line| + matches = %r|Example (\d+).*example\.com/(\d+)|.match(line) + assert_not_nil matches + assert_equal matches[1], matches[2] + end + end + end + +end diff --git a/vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/CHANGELOG.md b/vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/CHANGELOG.md new file mode 100644 index 000000000..95f091aaf --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/CHANGELOG.md @@ -0,0 +1,574 @@ +# Faraday Changelog + +## The changelog has moved! + +This file is not being updated anymore. Instead, please check the [Releases](https://github.com/lostisland/faraday/releases) page. 
+ +## [2.2.0](https://github.com/lostisland/faraday/compare/v2.1.0...v2.2.0) (2022-02-03) + +* Reintroduce the possibility to register middleware with symbols, strings or procs in [#1391](https://github.com/lostisland/faraday/pull/1391) + +## [2.1.0](https://github.com/lostisland/faraday/compare/v2.0.1...v2.1.0) (2022-01-15) + +* Fix test adapter thread safety by @iMacTia in [#1380](https://github.com/lostisland/faraday/pull/1380) +* Add default adapter options by @hirasawayuki in [#1382](https://github.com/lostisland/faraday/pull/1382) +* CI: Add Ruby 3.1 to matrix by @petergoldstein in [#1374](https://github.com/lostisland/faraday/pull/1374) +* docs: fix regex pattern in logger.md examples by @hirasawayuki in [#1378](https://github.com/lostisland/faraday/pull/1378) + +## [2.0.1](https://github.com/lostisland/faraday/compare/v2.0.0...v2.0.1) (2022-01-05) + +* Re-add `faraday-net_http` as default adapter by @iMacTia in [#1366](https://github.com/lostisland/faraday/pull/1366) +* Updated sample format in UPGRADING.md by @vimutter in [#1361](https://github.com/lostisland/faraday/pull/1361) +* docs: Make UPGRADING examples more copyable by @olleolleolle in [#1363](https://github.com/lostisland/faraday/pull/1363) + +## [2.0.0](https://github.com/lostisland/faraday/compare/v1.8.0...v2.0.0) (2022-01-04) + +The next major release is here, and it comes almost 2 years after the release of v1.0! + +This release changes the way you use Faraday and embraces a new paradigm of Faraday as an ecosystem, rather than a library. + +What does that mean? It means that Faraday is less of a bundled tool and more of a framework for the community to build on top of. + +As a result, all adapters and some middleware have moved out and are now shipped as standalone gems 🙌! + +But this doesn't mean that upgrading from Faraday 1.x to Faraday 2.0 should be hard, in fact we've listed everything you need to do in the [UPGRADING.md](https://github.com/lostisland/faraday/blob/main/UPGRADING.md) doc. + +Moreover, we've setup a new [awesome-faraday](https://github.com/lostisland/awesome-faraday) repository that will showcase a curated list of adapters and middleware 😎. + +This release was the result of the efforts of the core team and all the contributors, new and old, that have helped achieve this milestone 👏. 
+ +## What's Changed + +* Autoloading, dependency loading and middleware registry cleanup by @iMacTia in [#1301](https://github.com/lostisland/faraday/pull/1301) +* Move JSON middleware (request and response) from faraday_middleware by @iMacTia in [#1300](https://github.com/lostisland/faraday/pull/1300) +* Remove deprecated `Faraday::Request#method` by @olleolleolle in [#1303](https://github.com/lostisland/faraday/pull/1303) +* Remove deprecated `Faraday::UploadIO` by @iMacTia in [#1307](https://github.com/lostisland/faraday/pull/1307) +* [1.x] Deprecate Authorization helpers in `Faraday::Connection` by @iMacTia in [#1306](https://github.com/lostisland/faraday/pull/1306) +* Drop deprecated auth helpers from Connection and refactor auth middleware by @iMacTia in [#1308](https://github.com/lostisland/faraday/pull/1308) +* Add Faraday 1.x examples in authentication.md docs by @iMacTia in [#1320](https://github.com/lostisland/faraday/pull/1320) +* Fix passing a URL with embedded basic auth by @iMacTia in [#1324](https://github.com/lostisland/faraday/pull/1324) +* Register JSON middleware by @mollerhoj in [#1331](https://github.com/lostisland/faraday/pull/1331) +* Retry middleware should handle string exception class name consistently by @jrochkind in [#1334](https://github.com/lostisland/faraday/pull/1334) +* Improve request info in exceptions raised by RaiseError Middleware by @willianzocolau in [#1335](https://github.com/lostisland/faraday/pull/1335) +* Remove net-http adapter and update docs by @iMacTia in [#1336](https://github.com/lostisland/faraday/pull/1336) +* Explain plan for faraday_middleware in UPGRADING.md by @iMacTia in [#1339](https://github.com/lostisland/faraday/pull/1339) +* Scripts folder cleanup by @iMacTia in [#1340](https://github.com/lostisland/faraday/pull/1340) +* Replace `Hash#merge` with `Utils#deep_merge` for connection options by @xkwd in [#1343](https://github.com/lostisland/faraday/pull/1343) +* Callable authorizers by @sled in [#1345](https://github.com/lostisland/faraday/pull/1345) +* Default value for exc error by @DariuszMusielak in [#1351](https://github.com/lostisland/faraday/pull/1351) +* Don't call `retry_block` unless a retry is going to happen by @jrochkind in [#1350](https://github.com/lostisland/faraday/pull/1350) +* Improve documentation for v2 by @iMacTia in [#1353](https://github.com/lostisland/faraday/pull/1353) +* Remove default `default_adapter` (yes, you read that right) by @iMacTia in [#1354](https://github.com/lostisland/faraday/pull/1354) +* Remove retry middleware by @iMacTia in [#1356](https://github.com/lostisland/faraday/pull/1356) +* Remove multipart middleware and all its documentation and tests by @iMacTia in [#1357](https://github.com/lostisland/faraday/pull/1357) + +## [1.9.3](https://github.com/lostisland/faraday/compare/v1.9.2...v1.9.3) (2022-01-06) + +* Re-add support for Ruby 2.4+ by @iMacTia in [#1371](https://github.com/lostisland/faraday/pull/1371) + +## [1.9.2](https://github.com/lostisland/faraday/compare/v1.9.1...v1.9.2) (2022-01-06) + +* Add alias with legacy name to gemified middleware by @iMacTia in [#1372](https://github.com/lostisland/faraday/pull/1372) + +## [1.9.1](https://github.com/lostisland/faraday/compare/v1.9.0...v1.9.1) (2022-01-06) + +* Update adapter dependencies in Gemspec by @iMacTia in [#1370](https://github.com/lostisland/faraday/pull/1370) + +## [1.9.0](https://github.com/lostisland/faraday/compare/v1.8.0...v1.9.0) (2022-01-06) + +* Use external multipart and retry middleware by @iMacTia in 
[#1367](https://github.com/lostisland/faraday/pull/1367) + +## [1.8.0](https://github.com/lostisland/faraday/releases/tag/v1.8.0) (2021-09-18) + +### Features + +* Backport authorization procs (#1322, @jarl-dk) + +## [v1.7.0](https://github.com/lostisland/faraday/releases/tag/v1.7.0) (2021-08-09) + +### Features + +* Add strict_mode to Test::Stubs (#1298, @yykamei) + +## [v1.6.0](https://github.com/lostisland/faraday/releases/tag/v1.6.0) (2021-08-01) + +### Misc + +* Use external Rack adapter (#1296, @iMacTia) + +## [v1.5.1](https://github.com/lostisland/faraday/releases/tag/v1.5.1) (2021-07-11) + +### Fixes + +* Fix JRuby incompatibility after moving out EM adapters (#1294, @ahorek) + +### Documentation + +* Update YARD to follow RackBuilder (#1292, @kachick) + +## [v1.5.0](https://github.com/lostisland/faraday/releases/tag/v1.5.0) (2021-07-04) + +### Misc + +* Use external httpclient adapter (#1289, @iMacTia) +* Use external patron adapter (#1290, @iMacTia) + +## [v1.4.3](https://github.com/lostisland/faraday/releases/tag/v1.4.3) (2021-06-24) + +### Fixes + +* Silence warning (#1286, @gurgeous) +* Always dup url_prefix in Connection#build_exclusive_url (#1288, @alexeyds) + +## [v1.4.2](https://github.com/lostisland/faraday/releases/tag/v1.4.2) (2021-05-22) + +### Fixes +* Add proxy setting when url_prefix is changed (#1276, @ci) +* Default proxy scheme to http:// if necessary, fixes #1282 (#1283, @gurgeous) + +### Documentation +* Improve introduction page (#1273, @gurgeous) +* Docs: add more middleware examples (#1277, @gurgeous) + +### Misc +* Use external `em_http` and `em_synchrony` adapters (#1274, @iMacTia) + +## [v1.4.1](https://github.com/lostisland/faraday/releases/tag/v1.4.1) (2021-04-18) + +### Fixes + +* Fix dependencies from external adapter gems (#1269, @iMacTia) + +## [v1.4.0](https://github.com/lostisland/faraday/releases/tag/v1.4.0) (2021-04-16) + +### Highlights + +With this release, we continue the work of gradually moving out adapters into their own gems 🎉 +Thanks to @MikeRogers0 for helping the Faraday team in progressing with this quest 👏 + +And thanks to @olleolleolle efforts, Faraday is becoming more inclusive than ever 🤗 +Faraday's `master` branch has been renamed into `main`, we have an official policy on inclusive language and even a rubocop plugin to check for non-inclusive words ❤️! +Checkout the "Misc" section below for more details 🙌 ! + +### Fixes + +* Fix NoMethodError undefined method 'coverage' (#1255, @Maroo-b) + +### Documentation + +* Some docs on EventMachine adapters. 
(#1232, @damau) +* CONTRIBUTING: Fix grammar and layout (#1261, @olleolleolle) + +### Misc + +* Replacing Net::HTTP::Persistent with faraday-net_http_persistent (#1250, @MikeRogers0) +* CI: Configure the regenerated Coveralls token (#1256, @olleolleolle) +* Replace Excon adapter with Faraday::Excon gem, and fix autoloading issue with Faraday::NetHttpPersistent (#1257, @iMacTia) +* Drop CodeClimate (#1259, @olleolleolle) +* CI: Rename default branch to main (#1263, @olleolleolle) +* Drop RDoc support file .document (#1264, @olleolleolle, @iMacTia) +* CONTRIBUTING: add a policy on inclusive language (#1262, @olleolleolle) +* Add rubocop-inclusivity (#1267, @olleolleolle, @iMacTia) + +## [v1.3.1](https://github.com/lostisland/faraday/releases/tag/v1.3.1) (2021-04-16) + +### Fixes + +* Escape colon in path segment (#1237, @yarafan) +* Handle IPv6 address String on Faraday::Connection#proxy_from_env (#1252, @cosmo0920) + +### Documentation + +* Fix broken Rubydoc.info links (#1236, @nickcampbell18) +* Add httpx to list of external adapters (#1246, @HoneyryderChuck) + +### Misc + +* Refactor CI to remove duplicated line (#1230, @tricknotes) +* Gemspec: Pick a good ruby2_keywords release (#1241, @olleolleolle) + +## [v1.3.0](https://github.com/lostisland/faraday/releases/tag/v1.3.0) (2020-12-31) + +### Highlights +Faraday v1.3.0 is the first release to officially support Ruby 3.0 in the CI pipeline 🎉 🍾! + +This is also the first release with a previously "included" adapter (Net::HTTP) being isolated into a [separate gem](https://github.com/lostisland/faraday-net_http) 🎊! +The new adapter is added to Faraday as a dependency for now, so that means full backwards-compatibility, but just to be safe be careful when upgrading! + +This is a huge step towards are Faraday v2.0 objective of pushing adapters and middleware into separate gems. +Many thanks to the Faraday Team, @JanDintel and everyone who attended the [ROSS Conf remote event](https://www.rossconf.io/event/remote/) + +### Features + +* Improves consistency with Faraday::Error and Faraday::RaiseError (#1229, @qsona, @iMacTia) + +### Fixes + +* Don't assign to global ::Timer (#1227, @bpo) + +### Documentation + +* CHANGELOG: add releases after 1.0 (#1225, @olleolleolle) +* Improves retry middleware documentation. (#1228, @iMacTia) + +### Misc + +* Move out Net::HTTP adapter (#1222, @JanDintel, @iMacTia) +* Adds Ruby 3.0 to CI Matrix (#1226, @iMacTia) + + +## [v1.2.0](https://github.com/lostisland/faraday/releases/tag/v1.2.0) (2020-12-23) + +### Features + +* Introduces `on_request` and `on_complete` methods in `Faraday::Middleware`. (#1194, @iMacTia) + +### Fixes + +* Require 'date' to avoid retry exception (#1206, @rustygeldmacher) +* Fix rdebug recursion issue (#1205, @native-api) +* Update call to `em_http_ssl_patch` (#1202, @kylekeesling) +* `EmHttp` adapter: drop superfluous loaded? check (#1213, @olleolleolle) +* Avoid 1 use of keyword hackery (#1211, @grosser) +* Fix #1219 `Net::HTTP` still uses env proxy (#1221, @iMacTia) + +### Documentation + +* Add comment in gemspec to explain exposure of `examples` and `spec` folders. 
(#1192, @iMacTia) +* Adapters, how to create them (#1193, @olleolleolle) +* Update documentation on using the logger (#1196, @tijmenb) +* Adjust the retry documentation and spec to align with implementation (#1198, @nbeyer) + +### Misc + +* Test against ruby head (#1208, @grosser) + +## [v1.1.0](https://github.com/lostisland/faraday/releases/tag/v1.1.0) (2020-10-17) + +### Features + +* Makes parameters sorting configurable (#1162 @wishdev) +* Introduces `flat_encode` option for multipart adapter. (#1163 @iMacTia) +* Include request info in exceptions raised by RaiseError Middleware (#1181 @SandroDamilano) + +### Fixes + +* Avoid `last arg as keyword param` warning when building user middleware on Ruby 2.7 (#1153 @dgholz) +* Limits net-http-persistent version to < 4.0 (#1156 @iMacTia) +* Update `typhoeus` to new stable version (`1.4`) (#1159 @AlexWayfer) +* Properly fix test failure with Rack 2.1+. (#1171 @voxik) + +### Documentation + +* Improves documentation on how to contribute to the site by using Docker. (#1175 @iMacTia) +* Remove retry_change_requests from documentation (#1185 @stim371) + +### Misc + +* Link from GitHub Actions badge to CI workflow (#1141 @olleolleolle) +* Return tests of `Test` adapter (#1147 @AlexWayfer) +* Add 1.0 release to wording in CONTRIBUTING (#1155 @olleolleolle) +* Fix linting bumping Rubocop to 0.90.0 (#1182 @iMacTia) +* Drop `git ls-files` in gemspec (#1183 @utkarsh2102) +* Upgrade CI to ruby/setup-ruby (#1187 @gogainda) + +## [v1.0.1](https://github.com/lostisland/faraday/releases/tag/v1.0.1) (2020-03-29) + +### Fixes + +* Use Net::HTTP#start(&block) to ensure closed TCP connections (#1117) +* Fully qualify constants to be checked (#1122) +* Allows `parse` method to be private/protected in response middleware (#1123) +* Encode Spaces in Query Strings as '%20' Instead of '+' (#1125) +* Limits rack to v2.0.x (#1127) +* Adapter Registry reads also use mutex (#1136) + +### Documentation + +* Retry middleware documentation fix (#1109) +* Docs(retry): precise usage of retry-after (#1111) +* README: Link the logo to the website (#1112) +* Website: add search bar (#1116) +* Fix request/response mix-up in docs text (#1132) + +## [v1.0](https://github.com/lostisland/faraday/releases/tag/v1.0.0) (2020-01-22) + +Features: + +* Add #trace support to Faraday::Connection #861 (@technoweenie) +* Add the log formatter that is easy to override and safe to inherit #889 (@prikha) +* Support standalone adapters #941 (@iMacTia) +* Introduce Faraday::ConflictError for 409 response code #979 (@lucasmoreno) +* Add support for setting `read_timeout` option separately #1003 (@springerigor) +* Refactor and cleanup timeout settings across adapters #1022 (@technoweenie) +* Create ParamPart class to allow multipart posts with JSON content and file upload at the same time #1017 (@jeremy-israel) +* Copy UploadIO const -> FilePart for consistency with ParamPart #1018, #1021 (@technoweenie) +* Implement streaming responses in the Excon adapter #1026 (@technoweenie) +* Add default implementation of `Middleware#close`. #1069 (@ioquatix) +* Add `Adapter#close` so that derived classes can call super. 
#1091 (@ioquatix) +* Add log_level option to logger default formatter #1079 (@amrrbakry) +* Fix empty array for FlatParamsEncoder `{key: []} -> "key="` #1084 (@mrexox) + +Bugs: + +* Explicitly require date for DateTime library in Retry middleware #844 (@nickpresta) +* Refactor Adapter as final endpoints #846 (@iMacTia) +* Separate Request and Response bodies in Faraday::Env #847 (@iMacTia) +* Implement Faraday::Connection#options to make HTTP requests with the OPTIONS verb. #857 (@technoweenie) +* Multipart: Drop Ruby 1.8 String behavior compat #892 (@olleolleolle) +* Fix Ruby warnings in Faraday::Options.memoized #962 (@technoweenie) +* Allow setting min/max SSL version for a Net::HTTP::Persistent connection #972, #973 (@bdewater, @olleolleolle) +* Fix instances of frozen empty string literals #1040 (@BobbyMcWho) +* remove temp_proxy and improve proxy tests #1063 (@technoweenie) +* improve error initializer consistency #1095 (@technoweenie) + +Misc: + +* Convert minitest suite to RSpec #832 (@iMacTia, with help from @gaynetdinov, @Insti, @technoweenie) +* Major effort to update code to RuboCop standards. #854 (@olleolleolle, @iMacTia, @technoweenie, @htwroclau, @jherdman, @Drenmi, @Insti) +* Rubocop #1044, #1047 (@BobbyMcWho, @olleolleolle) +* Documentation tweaks (@adsteel, @Hubro, @iMacTia, @olleolleolle, @technoweenie) +* Update license year #981 (@Kevin-Kawai) +* Configure Jekyll plugin jekyll-remote-theme to support Docker usage #999 (@Lewiscowles1986) +* Fix Ruby 2.7 warnings #1009 (@tenderlove) +* Cleanup adapter connections #1023 (@technoweenie) +* Describe clearing cached stubs #1045 (@viraptor) +* Add project metadata to the gemspec #1046 (@orien) + +## v0.17.4 + +Fixes: + +* NetHttp adapter: wrap Errno::EADDRNOTAVAIL (#1114, @embs) +* Fix === for subclasses of deprecated classes (#1243, @mervync) + +## v0.17.3 + +Fixes: + +* Reverts changes in error classes hierarchy. #1092 (@iMacTia) +* Fix Ruby 1.9 syntax errors and improve Error class testing #1094 (@BanzaiMan, + @mrexox, @technoweenie) + +Misc: + +* Stops using `&Proc.new` for block forwarding. #1083 (@olleolleolle) +* Update CI to test against ruby 2.0-2.7 #1087, #1099 (@iMacTia, @olleolleolle, + @technoweenie) +* require FARADAY_DEPRECATE=warn to show Faraday v1.0 deprecation warnings + #1098 (@technoweenie) + +## v0.17.1 + +Final release before Faraday v1.0, with important fixes for Ruby 2.7. + +Fixes: + +* RaiseError response middleware raises exception if HTTP client returns a nil + status. #1042 (@jonnyom, @BobbyMcWho) + +Misc: + +* Fix Ruby 2.7 warnings (#1009) +* Add `Faraday::Deprecate` to warn about upcoming v1.0 changes. (#1054, #1059, + #1076, #1077) +* Add release notes up to current in CHANGELOG.md (#1066) +* Port minimal rspec suite from main branch to run backported tests. (#1058) + +## v0.17.0 + +This release is the same as v0.15.4. It was pushed to cover up releases +v0.16.0-v0.16.2. + +## v0.15.4 + +* Expose `pool_size` as a option for the NetHttpPersistent adapter (#834) + +## v0.15.3 + +* Make Faraday::Request serialisable with Marshal. (#803) +* Add DEFAULT_EXCEPTIONS constant to Request::Retry (#814) +* Add support for Ruby 2.6 Net::HTTP write_timeout (#824) + +## v0.15.2 + +* Prevents `Net::HTTP` adapters to retry request internally by setting `max_retries` to 0 if available (Ruby 2.5+). 
(#799) +* Fixes `NestedParamsEncoder` handling of empty array values (#801) + +## v0.15.1 + +* NetHttpPersistent adapter better reuse of SSL connections (#793) +* Refactor: inline cached_connection (#797) +* Logger middleware: use $stdout instead of STDOUT (#794) +* Fix: do not memoize/reuse Patron session (#796) + +Also in this release: + +* Allow setting min/max ssl version for Net::HTTP (#792) +* Allow setting min/max ssl version for Excon (#795) + +## v0.15.0 + +Features: + +* Added retry block option to retry middleware. (#770) +* Retry middleware improvements (honour Retry-After header, retry statuses) (#773) +* Improve response logger middleware output (#784) + +Fixes: + +* Remove unused class error (#767) +* Fix minor typo in README (#760) +* Reuse persistent connections when using net-http-persistent (#778) +* Fix Retry middleware documentation (#781) +* Returns the http response when giving up on retrying by status (#783) + +## v0.14.0 + +Features: + +* Allow overriding env proxy #754 (@iMacTia) +* Remove legacy Typhoeus adapter #715 (@olleolleolle) +* External Typhoeus Adapter Compatibility #748 (@iMacTia) +* Warn about missing adapter when making a request #743 (@antstorm) +* Faraday::Adapter::Test stubs now support entire urls (with host) #741 (@erik-escobedo) + +Fixes: + +* If proxy is manually provided, this takes priority over `find_proxy` #724 (@iMacTia) +* Fixes the behaviour for Excon's open_timeout (not setting write_timeout anymore) #731 (@apachelogger) +* Handle all connection timeout messages in Patron #687 (@stayhero) + +## v0.13.1 + +* Fixes an incompatibility with Addressable::URI being used as uri_parser + +## v0.13.0 + +Features: + +* Dynamically reloads the proxy when performing a request on an absolute domain (#701) +* Adapter support for Net::HTTP::Persistent v3.0.0 (#619) + +Fixes: + +* Prefer #hostname over #host. (#714) +* Fixes an edge-case issue with response headers parsing (missing HTTP header) (#719) + +## v0.12.2 + +* Parse headers from aggregated proxy requests/responses (#681) +* Guard against invalid middleware configuration with warning (#685) +* Do not use :insecure option by default in Patron (#691) +* Fixes an issue with HTTPClient not raising a `Faraday::ConnectionFailed` (#702) +* Fixes YAML serialization/deserialization for `Faraday::Utils::Headers` (#690) +* Fixes an issue with Options having a nil value (#694) +* Fixes an issue with Faraday.default_connection not using Faraday.default_connection_options (#698) +* Fixes an issue with Options.merge! 
and Faraday instrumentation middleware (#710) + +## v0.12.1 + +* Fix an issue with Patron tests failing on jruby +* Fix an issue with new `rewind_files` feature that was causing an exception when the body was not an Hash +* Expose wrapped_exception in all client errors +* Add Authentication Section to the ReadMe + +## v0.12.0.1 + +* Hotfix release to address an issue with TravisCI deploy on Rubygems + +## v0.12.0 + +Features: + +* Proxy feature now relies on Ruby `URI::Generic#find_proxy` and can use `no_proxy` ENV variable (not compatible with ruby < 2.0) +* Adds support for `context` request option to pass arbitrary information to middlewares + +Fixes: + +* Fix an issue with options that was causing new options to override defaults ones unexpectedly +* Rewind `UploadIO`s on retry to fix a compatibility issue +* Make multipart boundary unique +* Improvements in `README.md` + +## v0.11.0 + +Features: + +* Add `filter` method to Logger middleware +* Add support for Ruby2.4 and Minitest 6 +* Introduce block syntax to customise the adapter + +Fixes: + +* Fix an issue that was allowing to override `default_connection_options` from a connection instance +* Fix a bug that was causing newline escape characters ("\n") to be used when building the Authorization header + +## v0.10.1 + +- Fix an issue with HTTPClient adapter that was causing the SSL to be reset on every request +- Rescue `IOError` instead of specific subclass +- `Faraday::Utils::Headers` can now be successfully serialised in YAML +- Handle `default_connection_options` set with hash + +## v0.10.0 + +Breaking changes: +- Drop support for Ruby 1.8 + +Features: +- Include wrapped exception/response in ClientErrors +- Add `response.reason_phrase` +- Provide option to selectively skip logging request/response headers +- Add regex support for pattern matching in `test` adapter + +Fixes: +- Add `Faraday.respond_to?` to find methods managed by `method_missing` +- em-http: `request.host` instead of `connection.host` should be taken for SSL validations +- Allow `default_connection_options` to be merged when options are passed as url parameter +- Improve splitting key-value pairs in raw HTTP headers + +## v0.9.2 + +Adapters: +- Enable gzip compression for httpclient +- Fixes default certificate store for httpclient not having default paths. +- Make excon adapter compatible with 0.44 excon version +- Add compatibility with Patron 0.4.20 +- Determine default port numbers in Net::HTTP adapters (Addressable compatibility) +- em-http: wrap "connection closed by server" as ConnectionFailed type +- Wrap Errno::ETIMEDOUT in Faraday::Error::TimeoutError + +Utils: +- Add Rack-compatible support for parsing `a[][b]=c` nested queries +- Encode nil values in queries different than empty strings. Before: `a=`; now: `a`. +- Have `Faraday::Utils::Headers#replace` clear internal key cache +- Dup the internal key cache when a Headers hash is copied + +Env and middleware: +- Ensure `env` stored on middleware response has reference to the response +- Ensure that Response properties are initialized during `on_complete` (VCR compatibility) +- Copy request options in Faraday::Connection#dup +- Env custom members should be copied by Env.from(env) +- Honour per-request `request.options.params_encoder` +- Fix `interval_randomness` data type for Retry middleware +- Add maximum interval option for Retry middleware + +## v0.9.1 + +* Refactor Net:HTTP adapter so that with_net_http_connection can be overridden to allow pooled connections. 
(@Ben-M) +* Add configurable methods that bypass `retry_if` in the Retry request middleware. (@mike-bourgeous) + +## v0.9.0 + +* Add HTTPClient adapter (@hakanensari) +* Improve Retry handler (@mislav) +* Remove autoloading by default (@technoweenie) +* Improve internal docs (@technoweenie, @mislav) +* Respect user/password in http proxy string (@mislav) +* Adapter options are structs. Reinforces consistent options across adapters + (@technoweenie) +* Stop stripping trailing / off base URLs in a Faraday::Connection. (@technoweenie) +* Add a configurable URI parser. (@technoweenie) +* Remove need to manually autoload when using the authorization header helpers on `Faraday::Connection`. (@technoweenie) +* `Faraday::Adapter::Test` respects the `Faraday::RequestOptions#params_encoder` option. (@technoweenie) diff --git a/vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/LICENSE.md b/vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/LICENSE.md new file mode 100644 index 000000000..38776159a --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/LICENSE.md @@ -0,0 +1,20 @@ +Copyright (c) 2009-2023 Rick Olson, Zack Hobson + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +"Software"), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/README.md b/vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/README.md new file mode 100644 index 000000000..0cca6db87 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/README.md @@ -0,0 +1,67 @@ +# [![Faraday](./docs/_media/home-logo.svg)][website] + +[![Gem Version](https://badge.fury.io/rb/faraday.svg)](https://rubygems.org/gems/faraday) +[![GitHub Actions CI](https://github.com/lostisland/faraday/workflows/CI/badge.svg)](https://github.com/lostisland/faraday/actions?query=workflow%3ACI) +[![GitHub Discussions](https://img.shields.io/github/discussions/lostisland/faraday?logo=github)](https://github.com/lostisland/faraday/discussions) + +Faraday is an HTTP client library abstraction layer that provides a common interface over many +adapters (such as Net::HTTP) and embraces the concept of Rack middleware when processing the request/response cycle. +Take a look at [Awesome Faraday][awesome] for a list of available adapters and middleware. + +## Why use Faraday? + +Faraday gives you the power of Rack middleware for manipulating HTTP requests and responses, +making it easier to build sophisticated API clients or web service libraries that abstract away +the details of how HTTP requests are made. 
+ +Faraday comes with a lot of features out of the box, such as: +* Support for multiple adapters (Net::HTTP, Typhoeus, Patron, Excon, HTTPClient, and more) +* Persistent connections (keep-alive) +* Parallel requests +* Automatic response parsing (JSON, XML, YAML) +* Customization of the request/response cycle with middleware +* Support for streaming responses +* Support for uploading files +* And much more! + +## Getting Started + +The best starting point is the [Faraday Website][website], with its introduction and explanation. + +Need more details? See the [Faraday API Documentation][apidoc] to see how it works internally, or take a look at [Advanced techniques for calling HTTP APIs in Ruby](https://mattbrictson.com/blog/advanced-http-techniques-in-ruby) blog post from [@mattbrictson](https://github.com/mattbrictson) 🚀 + +## Supported Ruby versions + +This library aims to support and is [tested against][actions] the currently officially supported Ruby +implementations. This means that, even without a major release, we could add or drop support for Ruby versions, +following their [EOL](https://endoflife.date/ruby). +Currently that means we support Ruby 3.0+ + +If something doesn't work on one of these Ruby versions, it's a bug. + +This library may inadvertently work (or seem to work) on other Ruby +implementations and versions, however support will only be provided for the versions listed +above. + +If you would like this library to support another Ruby version, you may +volunteer to be a maintainer. Being a maintainer entails making sure all tests +run and pass on that implementation. When something breaks on your +implementation, you will be responsible for providing patches in a timely +fashion. If critical issues for a particular implementation exist at the time +of a major release, support for that Ruby version may be dropped. + +## Contribute + +Do you want to contribute to Faraday? +Open the issues page and check for the `help wanted` label! +But before you start coding, please read our [Contributing Guide][contributing] + +## Copyright + +© 2009 - 2023, the Faraday Team. Website and branding design by [Elena Lo Piccolo](https://elelopic.design). + +[awesome]: https://github.com/lostisland/awesome-faraday/#adapters +[website]: https://lostisland.github.io/faraday +[contributing]: https://github.com/lostisland/faraday/blob/main/.github/CONTRIBUTING.md +[apidoc]: https://www.rubydoc.info/github/lostisland/faraday +[actions]: https://github.com/lostisland/faraday/actions diff --git a/vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/Rakefile b/vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/Rakefile new file mode 100644 index 000000000..a98c5113f --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/Rakefile @@ -0,0 +1,12 @@ +# frozen_string_literal: true + +require 'rspec/core/rake_task' +require 'bundler' + +Bundler::GemHelper.install_tasks + +RSpec::Core::RakeTask.new(:spec) do |task| + task.ruby_opts = %w[-W] +end + +task default: :spec diff --git a/vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/examples/client_spec.rb b/vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/examples/client_spec.rb new file mode 100644 index 000000000..e30d86f79 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/examples/client_spec.rb @@ -0,0 +1,119 @@ +# frozen_string_literal: true + +# Requires Ruby with rspec and faraday gems. 
+# rspec client_spec.rb + +require 'faraday' +require 'json' + +# Example API client +class Client + def initialize(conn) + @conn = conn + end + + def httpbingo(jname, params: {}) + res = @conn.get("/#{jname}", params) + data = JSON.parse(res.body) + data['origin'] + end + + def foo(params) + res = @conn.post('/foo', JSON.dump(params)) + res.status + end +end + +RSpec.describe Client do + let(:stubs) { Faraday::Adapter::Test::Stubs.new } + let(:conn) { Faraday.new { |b| b.adapter(:test, stubs) } } + let(:client) { Client.new(conn) } + + it 'parses origin' do + stubs.get('/ip') do |env| + # optional: you can inspect the Faraday::Env + expect(env.url.path).to eq('/ip') + [ + 200, + { 'Content-Type': 'application/javascript' }, + '{"origin": "127.0.0.1"}' + ] + end + + # uncomment to trigger stubs.verify_stubbed_calls failure + # stubs.get('/unused') { [404, {}, ''] } + + expect(client.httpbingo('ip')).to eq('127.0.0.1') + stubs.verify_stubbed_calls + end + + it 'handles 404' do + stubs.get('/api') do + [ + 404, + { 'Content-Type': 'application/javascript' }, + '{}' + ] + end + expect(client.httpbingo('api')).to be_nil + stubs.verify_stubbed_calls + end + + it 'handles exception' do + stubs.get('/api') do + raise Faraday::ConnectionFailed + end + + expect { client.httpbingo('api') }.to raise_error(Faraday::ConnectionFailed) + stubs.verify_stubbed_calls + end + + context 'When the test stub is run in strict_mode' do + let(:stubs) { Faraday::Adapter::Test::Stubs.new(strict_mode: true) } + + it 'verifies the all parameter values are identical' do + stubs.get('/api?abc=123') do + [ + 200, + { 'Content-Type': 'application/javascript' }, + '{"origin": "127.0.0.1"}' + ] + end + + # uncomment to raise Stubs::NotFound + # expect(client.httpbingo('api', params: { abc: 123, foo: 'Kappa' })).to eq('127.0.0.1') + expect(client.httpbingo('api', params: { abc: 123 })).to eq('127.0.0.1') + stubs.verify_stubbed_calls + end + end + + context 'When the Faraday connection is configured with FlatParamsEncoder' do + let(:conn) { Faraday.new(request: { params_encoder: Faraday::FlatParamsEncoder }) { |b| b.adapter(:test, stubs) } } + + it 'handles the same multiple URL parameters' do + stubs.get('/api?a=x&a=y&a=z') { [200, { 'Content-Type' => 'application/json' }, '{"origin": "127.0.0.1"}'] } + + # uncomment to raise Stubs::NotFound + # expect(client.httpbingo('api', params: { a: %w[x y] })).to eq('127.0.0.1') + expect(client.httpbingo('api', params: { a: %w[x y z] })).to eq('127.0.0.1') + stubs.verify_stubbed_calls + end + end + + context 'When you want to test the body, you can use a proc as well as string' do + it 'tests with a string' do + stubs.post('/foo', '{"name":"YK"}') { [200, {}, ''] } + + expect(client.foo(name: 'YK')).to eq 200 + stubs.verify_stubbed_calls + end + + it 'tests with a proc' do + check = ->(request_body) { JSON.parse(request_body).slice('name') == { 'name' => 'YK' } } + stubs.post('/foo', check) { [200, {}, ''] } + + expect(client.foo(name: 'YK', created_at: Time.now)).to eq 200 + stubs.verify_stubbed_calls + end + end +end diff --git a/vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/examples/client_test.rb b/vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/examples/client_test.rb new file mode 100644 index 000000000..3aad95762 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/examples/client_test.rb @@ -0,0 +1,144 @@ +# frozen_string_literal: true + +# Requires Ruby with test-unit and faraday gems. 
+# ruby client_test.rb + +require 'faraday' +require 'json' +require 'test/unit' + +# Example API client +class Client + def initialize(conn) + @conn = conn + end + + def httpbingo(jname, params: {}) + res = @conn.get("/#{jname}", params) + data = JSON.parse(res.body) + data['origin'] + end + + def foo(params) + res = @conn.post('/foo', JSON.dump(params)) + res.status + end +end + +# Example API client test +class ClientTest < Test::Unit::TestCase + def test_httpbingo_name + stubs = Faraday::Adapter::Test::Stubs.new + stubs.get('/api') do |env| + # optional: you can inspect the Faraday::Env + assert_equal '/api', env.url.path + [ + 200, + { 'Content-Type': 'application/javascript' }, + '{"origin": "127.0.0.1"}' + ] + end + + # uncomment to trigger stubs.verify_stubbed_calls failure + # stubs.get('/unused') { [404, {}, ''] } + + cli = client(stubs) + assert_equal '127.0.0.1', cli.httpbingo('api') + stubs.verify_stubbed_calls + end + + def test_httpbingo_not_found + stubs = Faraday::Adapter::Test::Stubs.new + stubs.get('/api') do + [ + 404, + { 'Content-Type': 'application/javascript' }, + '{}' + ] + end + + cli = client(stubs) + assert_nil cli.httpbingo('api') + stubs.verify_stubbed_calls + end + + def test_httpbingo_exception + stubs = Faraday::Adapter::Test::Stubs.new + stubs.get('/api') do + raise Faraday::ConnectionFailed + end + + cli = client(stubs) + assert_raise Faraday::ConnectionFailed do + cli.httpbingo('api') + end + stubs.verify_stubbed_calls + end + + def test_strict_mode + stubs = Faraday::Adapter::Test::Stubs.new(strict_mode: true) + stubs.get('/api?abc=123') do + [ + 200, + { 'Content-Type': 'application/javascript' }, + '{"origin": "127.0.0.1"}' + ] + end + + cli = client(stubs) + assert_equal '127.0.0.1', cli.httpbingo('api', params: { abc: 123 }) + + # uncomment to raise Stubs::NotFound + # assert_equal '127.0.0.1', cli.httpbingo('api', params: { abc: 123, foo: 'Kappa' }) + stubs.verify_stubbed_calls + end + + def test_non_default_params_encoder + stubs = Faraday::Adapter::Test::Stubs.new(strict_mode: true) + stubs.get('/api?a=x&a=y&a=z') do + [ + 200, + { 'Content-Type': 'application/javascript' }, + '{"origin": "127.0.0.1"}' + ] + end + conn = Faraday.new(request: { params_encoder: Faraday::FlatParamsEncoder }) do |builder| + builder.adapter :test, stubs + end + + cli = Client.new(conn) + assert_equal '127.0.0.1', cli.httpbingo('api', params: { a: %w[x y z] }) + + # uncomment to raise Stubs::NotFound + # assert_equal '127.0.0.1', cli.httpbingo('api', params: { a: %w[x y] }) + stubs.verify_stubbed_calls + end + + def test_with_string_body + stubs = Faraday::Adapter::Test::Stubs.new do |stub| + stub.post('/foo', '{"name":"YK"}') { [200, {}, ''] } + end + cli = client(stubs) + assert_equal 200, cli.foo(name: 'YK') + + stubs.verify_stubbed_calls + end + + def test_with_proc_body + stubs = Faraday::Adapter::Test::Stubs.new do |stub| + check = ->(request_body) { JSON.parse(request_body).slice('name') == { 'name' => 'YK' } } + stub.post('/foo', check) { [200, {}, ''] } + end + cli = client(stubs) + assert_equal 200, cli.foo(name: 'YK', created_at: Time.now) + + stubs.verify_stubbed_calls + end + + def client(stubs) + conn = Faraday.new do |builder| + builder.adapter :test, stubs + end + Client.new(conn) + end +end diff --git a/vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/lib/faraday.rb b/vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/lib/faraday.rb new file mode 100644 index 000000000..34f327031 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/lib/faraday.rb @@ 
-0,0 +1,158 @@ +# frozen_string_literal: true + +require 'cgi/escape' +require 'cgi/util' if RUBY_VERSION < '3.5' +require 'date' +require 'set' +require 'forwardable' +require 'faraday/version' +require 'faraday/methods' +require 'faraday/error' +require 'faraday/middleware_registry' +require 'faraday/utils' +require 'faraday/options' +require 'faraday/connection' +require 'faraday/rack_builder' +require 'faraday/parameters' +require 'faraday/middleware' +require 'faraday/adapter' +require 'faraday/request' +require 'faraday/response' +require 'faraday/net_http' +# This is the main namespace for Faraday. +# +# It provides methods to create {Connection} objects, and HTTP-related +# methods to use directly. +# +# @example Helpful class methods for easy usage +# Faraday.get "http://faraday.com" +# +# @example Helpful class method `.new` to create {Connection} objects. +# conn = Faraday.new "http://faraday.com" +# conn.get '/' +# +module Faraday + CONTENT_TYPE = 'Content-Type' + + class << self + # The root path that Faraday is being loaded from. + # + # This is the root from where the libraries are auto-loaded. + # + # @return [String] + attr_accessor :root_path + + # Gets or sets the path that the Faraday libs are loaded from. + # @return [String] + attr_accessor :lib_path + + # @overload default_adapter + # Gets the Symbol key identifying a default Adapter to use + # for the default {Faraday::Connection}. Defaults to `:net_http`. + # @return [Symbol] the default adapter + # @overload default_adapter=(adapter) + # Updates default adapter while resetting {.default_connection}. + # @return [Symbol] the new default_adapter. + attr_reader :default_adapter + + # Option for the default_adapter + # @return [Hash] default_adapter options + attr_accessor :default_adapter_options + + # Documented below, see default_connection + attr_writer :default_connection + + # Tells Faraday to ignore the environment proxy (http_proxy). + # Defaults to `false`. + # @return [Boolean] + attr_accessor :ignore_env_proxy + + # Initializes a new {Connection}. + # + # @param url [String,Hash] The optional String base URL to use as a prefix + # for all requests. Can also be the options Hash. Any of these + # values will be set on every request made, unless overridden + # for a specific request. + # @param options [Hash] + # @option options [String] :url Base URL + # @option options [Hash] :params Hash of unencoded URI query params. + # @option options [Hash] :headers Hash of unencoded HTTP headers. + # @option options [Hash] :request Hash of request options. + # @option options [Hash] :ssl Hash of SSL options. + # @option options [Hash] :proxy Hash of Proxy options. 
+ # @return [Faraday::Connection] + # + # @example With an URL argument + # Faraday.new 'http://faraday.com' + # # => Faraday::Connection to http://faraday.com + # + # @example With an URL argument and an options hash + # Faraday.new 'http://faraday.com', params: { page: 1 } + # # => Faraday::Connection to http://faraday.com?page=1 + # + # @example With everything in an options hash + # Faraday.new url: 'http://faraday.com', + # params: { page: 1 } + # # => Faraday::Connection to http://faraday.com?page=1 + def new(url = nil, options = {}, &block) + options = Utils.deep_merge(default_connection_options, options) + Faraday::Connection.new(url, options, &block) + end + + # Documented elsewhere, see default_adapter reader + def default_adapter=(adapter) + @default_connection = nil + @default_adapter = adapter + end + + def respond_to_missing?(symbol, include_private = false) + default_connection.respond_to?(symbol, include_private) || super + end + + # @overload default_connection + # Gets the default connection used for simple scripts. + # @return [Faraday::Connection] a connection configured with + # the default_adapter. + # @overload default_connection=(connection) + # @param connection [Faraday::Connection] + # Sets the default {Faraday::Connection} for simple scripts that + # access the Faraday constant directly, such as + # Faraday.get "https://faraday.com". + def default_connection + @default_connection ||= Connection.new(default_connection_options) + end + + # Gets the default connection options used when calling {Faraday#new}. + # + # @return [Faraday::ConnectionOptions] + def default_connection_options + @default_connection_options ||= ConnectionOptions.new + end + + # Sets the default options used when calling {Faraday#new}. + # + # @param options [Hash, Faraday::ConnectionOptions] + def default_connection_options=(options) + @default_connection = nil + @default_connection_options = ConnectionOptions.from(options) + end + + private + + # Internal: Proxies method calls on the Faraday constant to + # .default_connection. + def method_missing(name, *args, &block) + if default_connection.respond_to?(name) + default_connection.send(name, *args, &block) + else + super + end + end + end + + self.ignore_env_proxy = false + self.root_path = File.expand_path __dir__ + self.lib_path = File.expand_path 'faraday', __dir__ + self.default_adapter = :net_http + self.default_adapter_options = {} +end diff --git a/vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/lib/faraday/adapter.rb b/vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/lib/faraday/adapter.rb new file mode 100644 index 000000000..1d9a45084 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/lib/faraday/adapter.rb @@ -0,0 +1,101 @@ +# frozen_string_literal: true + +module Faraday + # Base class for all Faraday adapters. Adapters are + # responsible for fulfilling a Faraday request. + class Adapter + extend MiddlewareRegistry + + CONTENT_LENGTH = 'Content-Length' + + # This module marks an Adapter as supporting parallel requests. + module Parallelism + attr_writer :supports_parallel + + def supports_parallel? + @supports_parallel + end + + def inherited(subclass) + super + subclass.supports_parallel = supports_parallel? + end + end + + extend Parallelism + self.supports_parallel = false + + def initialize(_app = nil, opts = {}, &block) + @app = lambda(&:response) + @connection_options = opts + @config_block = block + end + + # Yields or returns an adapter's configured connection. 
Depends on + # #build_connection being defined on this adapter. + # + # @param env [Faraday::Env, Hash] The env object for a faraday request. + # + # @return The return value of the given block, or the HTTP connection object + # if no block is given. + def connection(env) + conn = build_connection(env) + return conn unless block_given? + + yield conn + end + + # Close any persistent connections. The adapter should still be usable + # after calling close. + def close + # Possible implementation: + # @app.close if @app.respond_to?(:close) + end + + def call(env) + env.clear_body if env.needs_body? + env.response = Response.new + end + + private + + def save_response(env, status, body, headers = nil, reason_phrase = nil, finished: true) + env.status = status + env.body = body + env.reason_phrase = reason_phrase&.to_s&.strip + env.response_headers = Utils::Headers.new.tap do |response_headers| + response_headers.update headers unless headers.nil? + yield(response_headers) if block_given? + end + + env.response.finish(env) unless env.parallel? || !finished + env.response + end + + # Fetches either a read, write, or open timeout setting. Defaults to the + # :timeout value if a more specific one is not given. + # + # @param type [Symbol] Describes which timeout setting to get: :read, + # :write, or :open. + # @param options [Hash] Hash containing Symbol keys like :timeout, + # :read_timeout, :write_timeout, or :open_timeout + # + # @return [Integer, nil] Timeout duration in seconds, or nil if no timeout + # has been set. + def request_timeout(type, options) + key = TIMEOUT_KEYS.fetch(type) do + msg = "Expected :read, :write, :open. Got #{type.inspect} :(" + raise ArgumentError, msg + end + options[key] || options[:timeout] + end + + TIMEOUT_KEYS = { + read: :read_timeout, + open: :open_timeout, + write: :write_timeout + }.freeze + end +end + +require 'faraday/adapter/test' diff --git a/vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/lib/faraday/adapter/test.rb b/vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/lib/faraday/adapter/test.rb new file mode 100644 index 000000000..c637d139a --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/lib/faraday/adapter/test.rb @@ -0,0 +1,311 @@ +# frozen_string_literal: true + +require 'timeout' + +module Faraday + class Adapter + # @example + # test = Faraday::Connection.new do + # use Faraday::Adapter::Test do |stub| + # # Define matcher to match the request + # stub.get '/resource.json' do + # # return static content + # [200, {'Content-Type' => 'application/json'}, 'hi world'] + # end + # + # # response with content generated based on request + # stub.get '/showget' do |env| + # [200, {'Content-Type' => 'text/plain'}, env[:method].to_s] + # end + # + # # A regular expression can be used as matching filter + # stub.get /\A\/items\/(\d+)\z/ do |env, meta| + # # in case regular expression is used, an instance of MatchData + # # can be received + # [200, + # {'Content-Type' => 'text/plain'}, + # "showing item: #{meta[:match_data][1]}" + # ] + # end + # + # # Test the request body is the same as the stubbed body + # stub.post('/bar', 'name=YK&word=call') { [200, {}, ''] } + # + # # You can pass a proc as a stubbed body and check the request body in your way. + # # In this case, the proc should return true or false. + # stub.post('/foo', ->(request_body) do + # JSON.parse(request_body).slice('name') == { 'name' => 'YK' } }) { [200, {}, ''] + # end + # + # # You can set strict_mode to exactly match the stubbed requests. 
+ # stub.strict_mode = true + # end + # end + # + # resp = test.get '/resource.json' + # resp.body # => 'hi world' + # + # resp = test.get '/showget' + # resp.body # => 'get' + # + # resp = test.get '/items/1' + # resp.body # => 'showing item: 1' + # + # resp = test.get '/items/2' + # resp.body # => 'showing item: 2' + # + # resp = test.post '/bar', 'name=YK&word=call' + # resp.status # => 200 + # + # resp = test.post '/foo', JSON.dump(name: 'YK', created_at: Time.now) + # resp.status # => 200 + class Test < Faraday::Adapter + attr_accessor :stubs + + # A stack of Stubs + class Stubs + class NotFound < StandardError + end + + def initialize(strict_mode: false) + # { get: [Stub, Stub] } + @stack = {} + @consumed = {} + @strict_mode = strict_mode + @stubs_mutex = Monitor.new + yield(self) if block_given? + end + + def empty? + @stack.empty? + end + + # @param env [Faraday::Env] + def match(env) + request_method = env[:method] + return false unless @stack.key?(request_method) + + stack = @stack[request_method] + consumed = (@consumed[request_method] ||= []) + + @stubs_mutex.synchronize do + stub, meta = matches?(stack, env) + if stub + removed = stack.delete(stub) + consumed << removed unless removed.nil? + return stub, meta + end + end + matches?(consumed, env) + end + + def get(path, headers = {}, &block) + new_stub(:get, path, headers, &block) + end + + def head(path, headers = {}, &block) + new_stub(:head, path, headers, &block) + end + + def post(path, body = nil, headers = {}, &block) + new_stub(:post, path, headers, body, &block) + end + + def put(path, body = nil, headers = {}, &block) + new_stub(:put, path, headers, body, &block) + end + + def patch(path, body = nil, headers = {}, &block) + new_stub(:patch, path, headers, body, &block) + end + + def delete(path, headers = {}, &block) + new_stub(:delete, path, headers, &block) + end + + def options(path, headers = {}, &block) + new_stub(:options, path, headers, &block) + end + + # Raises an error if any of the stubbed calls have not been made. + def verify_stubbed_calls + failed_stubs = [] + @stack.each do |method, stubs| + next if stubs.empty? + + failed_stubs.concat( + stubs.map do |stub| + "Expected #{method} #{stub}." + end + ) + end + raise failed_stubs.join(' ') unless failed_stubs.empty? + end + + # Set strict_mode. If the value is true, this adapter tries to find matched requests strictly, + # which means that all of a path, parameters, and headers must be the same as an actual request. + def strict_mode=(value) + @strict_mode = value + @stack.each_value do |stubs| + stubs.each do |stub| + stub.strict_mode = value + end + end + end + + protected + + def new_stub(request_method, path, headers = {}, body = nil, &block) + normalized_path, host = + if path.is_a?(Regexp) + path + else + [ + Faraday::Utils.normalize_path(path), + Faraday::Utils.URI(path).host + ] + end + path, query = normalized_path.respond_to?(:split) ? 
normalized_path.split('?') : normalized_path + headers = Utils::Headers.new(headers) + + stub = Stub.new(host, path, query, headers, body, @strict_mode, block) + (@stack[request_method] ||= []) << stub + end + + # @param stack [Hash] + # @param env [Faraday::Env] + def matches?(stack, env) + stack.each do |stub| + match_result, meta = stub.matches?(env) + return stub, meta if match_result + end + nil + end + end + + # Stub request + Stub = Struct.new(:host, :path, :query, :headers, :body, :strict_mode, :block) do + # @param env [Faraday::Env] + def matches?(env) + request_host = env[:url].host + request_path = Faraday::Utils.normalize_path(env[:url].path) + request_headers = env.request_headers + request_body = env[:body] + + # meta is a hash used as carrier + # that will be yielded to consumer block + meta = {} + [(host.nil? || host == request_host) && + path_match?(request_path, meta) && + params_match?(env) && + body_match?(request_body) && + headers_match?(request_headers), meta] + end + + def path_match?(request_path, meta) + if path.is_a?(Regexp) + !!(meta[:match_data] = path.match(request_path)) + else + path == request_path + end + end + + # @param env [Faraday::Env] + def params_match?(env) + request_params = env[:params] + params = env.params_encoder.decode(query) || {} + + if strict_mode + return Set.new(params) == Set.new(request_params) + end + + params.keys.all? do |key| + request_params[key] == params[key] + end + end + + def headers_match?(request_headers) + if strict_mode + headers_with_user_agent = headers.dup.tap do |hs| + # NOTE: Set User-Agent in case it's not set when creating Stubs. + # Users would not want to set Faraday's User-Agent explicitly. + hs[:user_agent] ||= Connection::USER_AGENT + end + return Set.new(headers_with_user_agent) == Set.new(request_headers) + end + + headers.keys.all? do |key| + request_headers[key] == headers[key] + end + end + + def body_match?(request_body) + return true if body.to_s.empty? + + case body + when Proc + body.call(request_body) + else + request_body == body + end + end + + def to_s + "#{path} #{body}" + end + end + + def initialize(app, stubs = nil, &block) + super(app) + @stubs = stubs || Stubs.new + configure(&block) if block + end + + def configure + yield(stubs) + end + + # @param env [Faraday::Env] + def call(env) + super + + env.request.params_encoder ||= Faraday::Utils.default_params_encoder + env[:params] = env.params_encoder.decode(env[:url].query) || {} + stub, meta = stubs.match(env) + + unless stub + raise Stubs::NotFound, "no stubbed request for #{env[:method]} " \ + "#{env[:url]} #{env[:body]} #{env[:headers]}" + end + + block_arity = stub.block.arity + params = if block_arity >= 0 + [env, meta].take(block_arity) + else + [env, meta] + end + + timeout = request_timeout(:open, env[:request]) + timeout ||= request_timeout(:read, env[:request]) + + status, headers, body = + if timeout + ::Timeout.timeout(timeout, Faraday::TimeoutError) do + stub.block.call(*params) + end + else + stub.block.call(*params) + end + + # We need to explicitly pass `reason_phrase = nil` here to avoid keyword args conflicts. + # See https://github.com/lostisland/faraday/issues/1444 + # TODO: remove `nil` explicit reason_phrase once Ruby 3.0 becomes minimum req. 
version + save_response(env, status, body, headers, nil) + + @app.call(env) + end + end + end +end + +Faraday::Adapter.register_middleware(test: Faraday::Adapter::Test) diff --git a/vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/lib/faraday/adapter_registry.rb b/vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/lib/faraday/adapter_registry.rb new file mode 100644 index 000000000..1cd1e7e17 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/lib/faraday/adapter_registry.rb @@ -0,0 +1,30 @@ +# frozen_string_literal: true + +require 'monitor' + +module Faraday + # AdapterRegistry registers adapter class names so they can be looked up by a + # String or Symbol name. + class AdapterRegistry + def initialize + @lock = Monitor.new + @constants = {} + end + + def get(name) + klass = @lock.synchronize do + @constants[name] + end + return klass if klass + + Object.const_get(name).tap { |c| set(c, name) } + end + + def set(klass, name = nil) + name ||= klass.to_s + @lock.synchronize do + @constants[name] = klass + end + end + end +end diff --git a/vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/lib/faraday/connection.rb b/vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/lib/faraday/connection.rb new file mode 100644 index 000000000..543cd4bb0 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/lib/faraday/connection.rb @@ -0,0 +1,564 @@ +# frozen_string_literal: true + +module Faraday + # Connection objects manage the default properties and the middleware + # stack for fulfilling an HTTP request. + # + # @example + # + # conn = Faraday::Connection.new 'http://httpbingo.org' + # + # # GET http://httpbingo.org/nigiri + # conn.get 'nigiri' + # # => # + # + class Connection + # A Set of allowed HTTP verbs. + METHODS = Set.new %i[get post put delete head patch options trace] + USER_AGENT = "Faraday v#{VERSION}".freeze + + # @return [Hash] URI query unencoded key/value pairs. + attr_reader :params + + # @return [Hash] unencoded HTTP header key/value pairs. + attr_reader :headers + + # @return [String] a URI with the prefix used for all requests from this + # Connection. This includes a default host name, scheme, port, and path. + attr_reader :url_prefix + + # @return [Faraday::RackBuilder] Builder for this Connection. + attr_reader :builder + + # @return [Hash] SSL options. + attr_reader :ssl + + # @return [Object] the parallel manager for this Connection. + attr_reader :parallel_manager + + # Sets the default parallel manager for this connection. + attr_writer :default_parallel_manager + + # @return [Hash] proxy options. + attr_reader :proxy + + # Initializes a new Faraday::Connection. + # + # @param url [URI, String] URI or String base URL to use as a prefix for all + # requests (optional). + # @param options [Hash, Faraday::ConnectionOptions] + # @option options [URI, String] :url ('http:/') URI or String base URL + # @option options [Hash String>] :params URI query unencoded + # key/value pairs. + # @option options [Hash String>] :headers Hash of unencoded HTTP + # header key/value pairs. + # @option options [Hash] :request Hash of request options. + # @option options [Hash] :ssl Hash of SSL options. 
+ # @option options [Hash, URI, String] :proxy proxy options, either as a URL + # or as a Hash + # @option options [URI, String] :proxy[:uri] + # @option options [String] :proxy[:user] + # @option options [String] :proxy[:password] + # @yield [self] after all setup has been done + def initialize(url = nil, options = nil) + options = ConnectionOptions.from(options) + + if url.is_a?(Hash) || url.is_a?(ConnectionOptions) + options = Utils.deep_merge(options, url) + url = options.url + end + + @parallel_manager = nil + @headers = Utils::Headers.new + @params = Utils::ParamsHash.new + @options = options.request + @ssl = options.ssl + @default_parallel_manager = options.parallel_manager + @manual_proxy = nil + + @builder = options.builder || begin + # pass an empty block to Builder so it doesn't assume default middleware + options.new_builder(block_given? ? proc { |b| } : nil) + end + + self.url_prefix = url || 'http:/' + + @params.update(options.params) if options.params + @headers.update(options.headers) if options.headers + + initialize_proxy(url, options) + + yield(self) if block_given? + + @headers[:user_agent] ||= USER_AGENT + end + + def initialize_proxy(url, options) + @manual_proxy = !!options.proxy + @proxy = + if options.proxy + ProxyOptions.from(options.proxy) + else + proxy_from_env(url) + end + end + + # Sets the Hash of URI query unencoded key/value pairs. + # @param hash [Hash] + def params=(hash) + @params.replace hash + end + + # Sets the Hash of unencoded HTTP header key/value pairs. + # @param hash [Hash] + def headers=(hash) + @headers.replace hash + end + + extend Forwardable + + def_delegators :builder, :use, :request, :response, :adapter, :app + + # Closes the underlying resources and/or connections. In the case of + # persistent connections, this closes all currently open connections + # but does not prevent new connections from being made. + def close + app.close + end + + # @!method get(url = nil, params = nil, headers = nil) + # Makes a GET HTTP request without a body. + # @!scope class + # + # @param url [String, URI, nil] The optional String base URL to use as a prefix for + # all requests. Can also be the options Hash. + # @param params [Hash, nil] Hash of URI query unencoded key/value pairs. + # @param headers [Hash, nil] unencoded HTTP header key/value pairs. + # + # @example + # conn.get '/items', { page: 1 }, :accept => 'application/json' + # + # # ElasticSearch example sending a body with GET. + # conn.get '/twitter/tweet/_search' do |req| + # req.headers[:content_type] = 'application/json' + # req.params[:routing] = 'kimchy' + # req.body = JSON.generate(query: {...}) + # end + # + # @yield [Faraday::Request] for further request customizations + # @return [Faraday::Response] + + # @!method head(url = nil, params = nil, headers = nil) + # Makes a HEAD HTTP request without a body. + # @!scope class + # + # @param url [String, URI, nil] The optional String base URL to use as a prefix for + # all requests. Can also be the options Hash. + # @param params [Hash, nil] Hash of URI query unencoded key/value pairs. + # @param headers [Hash, nil] unencoded HTTP header key/value pairs. + # + # @example + # conn.head '/items/1' + # + # @yield [Faraday::Request] for further request customizations + # @return [Faraday::Response] + + # @!method delete(url = nil, params = nil, headers = nil) + # Makes a DELETE HTTP request without a body. + # @!scope class + # + # @param url [String, URI, nil] The optional String base URL to use as a prefix for + # all requests. 
Can also be the options Hash. + # @param params [Hash, nil] Hash of URI query unencoded key/value pairs. + # @param headers [Hash, nil] unencoded HTTP header key/value pairs. + # + # @example + # conn.delete '/items/1' + # + # @yield [Faraday::Request] for further request customizations + # @return [Faraday::Response] + + # @!method trace(url = nil, params = nil, headers = nil) + # Makes a TRACE HTTP request without a body. + # @!scope class + # + # @param url [String, URI, nil] The optional String base URL to use as a prefix for + # all requests. Can also be the options Hash. + # @param params [Hash, nil] Hash of URI query unencoded key/value pairs. + # @param headers [Hash, nil] unencoded HTTP header key/value pairs. + # + # @example + # conn.connect '/items/1' + # + # @yield [Faraday::Request] for further request customizations + # @return [Faraday::Response] + + # @!visibility private + METHODS_WITH_QUERY.each do |method| + class_eval <<-RUBY, __FILE__, __LINE__ + 1 + def #{method}(url = nil, params = nil, headers = nil) + run_request(:#{method}, url, nil, headers) do |request| + request.params.update(params) if params + yield request if block_given? + end + end + RUBY + end + + # @overload options() + # Returns current Connection options. + # + # @overload options(url, params = nil, headers = nil) + # Makes an OPTIONS HTTP request to the given URL. + # @param url [String, URI, nil] String base URL to sue as a prefix for all requests. + # @param params [Hash, nil] Hash of URI query unencoded key/value pairs. + # @param headers [Hash, nil] unencoded HTTP header key/value pairs. + # + # @example + # conn.options '/items/1' + # + # @yield [Faraday::Request] for further request customizations + # @return [Faraday::Response] + def options(*args) + return @options if args.empty? + + url, params, headers = *args + run_request(:options, url, nil, headers) do |request| + request.params.update(params) if params + yield request if block_given? + end + end + + # @!method post(url = nil, body = nil, headers = nil) + # Makes a POST HTTP request with a body. + # @!scope class + # + # @param url [String, URI, nil] The optional String base URL to use as a prefix for + # all requests. Can also be the options Hash. + # @param body [String, nil] body for the request. + # @param headers [Hash, nil] unencoded HTTP header key/value pairs. + # + # @example + # conn.post '/items', data, content_type: 'application/json' + # + # # Simple ElasticSearch indexing sample. + # conn.post '/twitter/tweet' do |req| + # req.headers[:content_type] = 'application/json' + # req.params[:routing] = 'kimchy' + # req.body = JSON.generate(user: 'kimchy', ...) + # end + # + # @yield [Faraday::Request] for further request customizations + # @return [Faraday::Response] + + # @!method put(url = nil, body = nil, headers = nil) + # Makes a PUT HTTP request with a body. + # @!scope class + # + # @param url [String, URI, nil] The optional String base URL to use as a prefix for + # all requests. Can also be the options Hash. + # @param body [String, nil] body for the request. + # @param headers [Hash, nil] unencoded HTTP header key/value pairs. + # + # @example + # conn.put '/products/123', data, content_type: 'application/json' + # + # # Star a gist. 
+ # conn.put 'https://api.github.com/gists/GIST_ID/star' do |req| + # req.headers['Accept'] = 'application/vnd.github+json' + # req.headers['Authorization'] = 'Bearer ' + # req.headers['X-GitHub-Api-Version'] = '2022-11-28' + # end + # + # @yield [Faraday::Request] for further request customizations + # @return [Faraday::Response] + + # @!visibility private + METHODS_WITH_BODY.each do |method| + class_eval <<-RUBY, __FILE__, __LINE__ + 1 + def #{method}(url = nil, body = nil, headers = nil, &block) + run_request(:#{method}, url, body, headers, &block) + end + RUBY + end + + # Check if the adapter is parallel-capable. + # + # @yield if the adapter isn't parallel-capable, or if no adapter is set yet. + # + # @return [Object, nil] a parallel manager or nil if yielded + # @api private + def default_parallel_manager + @default_parallel_manager ||= begin + adapter = @builder.adapter.klass if @builder.adapter + + if support_parallel?(adapter) + adapter.setup_parallel_manager + elsif block_given? + yield + end + end + end + + # Determine if this Faraday::Connection can make parallel requests. + # + # @return [Boolean] + def in_parallel? + !!@parallel_manager + end + + # Sets up the parallel manager to make a set of requests. + # + # @param manager [Object] The parallel manager that this Connection's + # Adapter uses. + # + # @yield a block to execute multiple requests. + # @return [void] + def in_parallel(manager = nil, &block) + @parallel_manager = manager || default_parallel_manager do + warn 'Warning: `in_parallel` called but no parallel-capable adapter ' \ + 'on Faraday stack' + warn caller[2, 10].join("\n") + nil + end + return yield unless @parallel_manager + + if @parallel_manager.respond_to?(:execute) + # Execute is the new method that is responsible for executing the block. + @parallel_manager.execute(&block) + else + # TODO: Old behaviour, deprecate and remove in 3.0 + yield + @parallel_manager.run + end + ensure + @parallel_manager = nil + end + + # Sets the Hash proxy options. + # + # @param new_value [Object] + def proxy=(new_value) + @manual_proxy = true + @proxy = new_value ? ProxyOptions.from(new_value) : nil + end + + def_delegators :url_prefix, :scheme, :scheme=, :host, :host=, :port, :port= + def_delegator :url_prefix, :path, :path_prefix + + # Parses the given URL with URI and stores the individual + # components in this connection. These components serve as defaults for + # requests made by this connection. + # + # @param url [String, URI] + # @param encoder [Object] + # + # @example + # + # conn = Faraday::Connection.new { ... } + # conn.url_prefix = "https://httpbingo.org/api" + # conn.scheme # => https + # conn.path_prefix # => "/api" + # + # conn.get("nigiri?page=2") # accesses https://httpbingo.org/api/nigiri + def url_prefix=(url, encoder = nil) + uri = @url_prefix = Utils.URI(url) + self.path_prefix = uri.path + + params.merge_query(uri.query, encoder) + uri.query = nil + + with_uri_credentials(uri) do |user, password| + set_basic_auth(user, password) + uri.user = uri.password = nil + end + + @proxy = proxy_from_env(url) unless @manual_proxy + end + + def set_basic_auth(user, password) + header = Faraday::Utils.basic_header_from(user, password) + headers[Faraday::Request::Authorization::KEY] = header + end + + # Sets the path prefix and ensures that it always has a leading + # slash. 
+ # + # @param value [String] + # + # @return [String] the new path prefix + def path_prefix=(value) + url_prefix.path = if value + value = "/#{value}" unless value[0, 1] == '/' + value + end + end + + # Takes a relative url for a request and combines it with the defaults + # set on the connection instance. + # + # @param url [String, URI, nil] + # @param extra_params [Hash] + # + # @example + # conn = Faraday::Connection.new { ... } + # conn.url_prefix = "https://httpbingo.org/api?token=abc" + # conn.scheme # => https + # conn.path_prefix # => "/api" + # + # conn.build_url("nigiri?page=2") + # # => https://httpbingo.org/api/nigiri?token=abc&page=2 + # + # conn.build_url("nigiri", page: 2) + # # => https://httpbingo.org/api/nigiri?token=abc&page=2 + # + def build_url(url = nil, extra_params = nil) + uri = build_exclusive_url(url) + + query_values = params.dup.merge_query(uri.query, options.params_encoder) + query_values.update(extra_params) if extra_params + uri.query = + if query_values.empty? + nil + else + query_values.to_query(options.params_encoder) + end + + uri + end + + # Builds and runs the Faraday::Request. + # + # @param method [Symbol] HTTP method. + # @param url [String, URI, nil] String or URI to access. + # @param body [String, Hash, Array, nil] The request body that will eventually be converted to + # a string; middlewares can be used to support more complex types. + # @param headers [Hash, nil] unencoded HTTP header key/value pairs. + # + # @return [Faraday::Response] + def run_request(method, url, body, headers) + unless METHODS.include?(method) + raise ArgumentError, "unknown http method: #{method}" + end + + request = build_request(method) do |req| + req.options.proxy = proxy_for_request(url) + req.url(url) if url + req.headers.update(headers) if headers + req.body = body if body + yield(req) if block_given? + end + + builder.build_response(self, request) + end + + # Creates and configures the request object. + # + # @param method [Symbol] + # + # @yield [Faraday::Request] if block given + # @return [Faraday::Request] + def build_request(method) + Request.create(method) do |req| + req.params = params.dup + req.headers = headers.dup + req.options = options.dup + yield(req) if block_given? + end + end + + # Build an absolute URL based on url_prefix. + # + # @param url [String, URI, nil] + # @param params [Faraday::Utils::ParamsHash] A Faraday::Utils::ParamsHash to + # replace the query values + # of the resulting url (default: nil). + # + # @return [URI] + def build_exclusive_url(url = nil, params = nil, params_encoder = nil) + url = nil if url.respond_to?(:empty?) && url.empty? + base = url_prefix.dup + if url && !base.path.end_with?('/') + base.path = "#{base.path}/" # ensure trailing slash + end + # Ensure relative url will be parsed correctly (such as `service:search` ) + url = "./#{url}" if url.respond_to?(:start_with?) && !url.start_with?('http://', 'https://', '/', './', '../') + uri = url ? base + url : base + if params + uri.query = params.to_query(params_encoder || options.params_encoder) + end + uri.query = nil if uri.query && uri.query.empty? + uri + end + + # Creates a duplicate of this Faraday::Connection. + # + # @api private + # + # @return [Faraday::Connection] + def dup + self.class.new(build_exclusive_url, + headers: headers.dup, + params: params.dup, + builder: builder.dup, + ssl: ssl.dup, + request: options.dup) + end + + # Yields username and password extracted from a URI if they both exist. 
+ # + # @param uri [URI] + # @yield [username, password] any username and password + # @yieldparam username [String] any username from URI + # @yieldparam password [String] any password from URI + # @return [void] + # @api private + def with_uri_credentials(uri) + return unless uri.user && uri.password + + yield(Utils.unescape(uri.user), Utils.unescape(uri.password)) + end + + def proxy_from_env(url) + return if Faraday.ignore_env_proxy + + uri = nil + case url + when String + uri = Utils.URI(url) + uri = if uri.host.nil? + find_default_proxy + else + URI.parse("#{uri.scheme}://#{uri.host}").find_proxy + end + when URI + uri = url.find_proxy + when nil + uri = find_default_proxy + end + ProxyOptions.from(uri) if uri + end + + def find_default_proxy + uri = ENV.fetch('http_proxy', nil) + return unless uri && !uri.empty? + + uri = "http://#{uri}" unless uri.match?(/^http/i) + uri + end + + def proxy_for_request(url) + return proxy if @manual_proxy + + if url && Utils.URI(url).absolute? + proxy_from_env(url) + else + proxy + end + end + + def support_parallel?(adapter) + adapter.respond_to?(:supports_parallel?) && adapter&.supports_parallel? + end + end +end diff --git a/vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/lib/faraday/encoders/flat_params_encoder.rb b/vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/lib/faraday/encoders/flat_params_encoder.rb new file mode 100644 index 000000000..bc10c8b9a --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/lib/faraday/encoders/flat_params_encoder.rb @@ -0,0 +1,105 @@ +# frozen_string_literal: true + +module Faraday + # FlatParamsEncoder manages URI params as a flat hash. Any Array values repeat + # the parameter multiple times. + module FlatParamsEncoder + class << self + extend Forwardable + def_delegators :'Faraday::Utils', :escape, :unescape + end + + # Encode converts the given param into a URI querystring. Keys and values + # will converted to strings and appropriately escaped for the URI. + # + # @param params [Hash] query arguments to convert. + # + # @example + # + # encode({a: %w[one two three], b: true, c: "C"}) + # # => 'a=one&a=two&a=three&b=true&c=C' + # + # @return [String] the URI querystring (without the leading '?') + def self.encode(params) + return nil if params.nil? + + unless params.is_a?(Array) + unless params.respond_to?(:to_hash) + raise TypeError, + "Can't convert #{params.class} into Hash." + end + params = params.to_hash + params = params.map do |key, value| + key = key.to_s if key.is_a?(Symbol) + [key, value] + end + + # Only to be used for non-Array inputs. Arrays should preserve order. + params.sort! if @sort_params + end + + # The params have form [['key1', 'value1'], ['key2', 'value2']]. + buffer = +'' + params.each do |key, value| + encoded_key = escape(key) + if value.nil? + buffer << "#{encoded_key}&" + elsif value.is_a?(Array) + if value.empty? + buffer << "#{encoded_key}=&" + else + value.each do |sub_value| + encoded_value = escape(sub_value) + buffer << "#{encoded_key}=#{encoded_value}&" + end + end + else + encoded_value = escape(value) + buffer << "#{encoded_key}=#{encoded_value}&" + end + end + buffer.chop + end + + # Decode converts the given URI querystring into a hash. + # + # @param query [String] query arguments to parse. + # + # @example + # + # decode('a=one&a=two&a=three&b=true&c=C') + # # => {"a"=>["one", "two", "three"], "b"=>"true", "c"=>"C"} + # + # @return [Hash] parsed keys and value strings from the querystring. + def self.decode(query) + return nil if query.nil? 
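# Split the query on '&', unescape keys and values ('+' becomes a space, a
# key with no '=' decodes to true), and fold repeated keys into an Array,
# e.g. 'a=one&a=two' => { 'a' => ['one', 'two'] }.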
+ + empty_accumulator = {} + + split_query = (query.split('&').map do |pair| + pair.split('=', 2) if pair && !pair.empty? + end).compact + split_query.each_with_object(empty_accumulator.dup) do |pair, accu| + pair[0] = unescape(pair[0]) + pair[1] = true if pair[1].nil? + if pair[1].respond_to?(:to_str) + pair[1] = unescape(pair[1].to_str.tr('+', ' ')) + end + if accu[pair[0]].is_a?(Array) + accu[pair[0]] << pair[1] + elsif accu[pair[0]] + accu[pair[0]] = [accu[pair[0]], pair[1]] + else + accu[pair[0]] = pair[1] + end + end + end + + class << self + attr_accessor :sort_params + end + + # Useful default for OAuth and caching. + @sort_params = true + end +end diff --git a/vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/lib/faraday/encoders/nested_params_encoder.rb b/vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/lib/faraday/encoders/nested_params_encoder.rb new file mode 100644 index 000000000..3ca3e73e7 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/lib/faraday/encoders/nested_params_encoder.rb @@ -0,0 +1,183 @@ +# frozen_string_literal: true + +module Faraday + # Sub-module for encoding parameters into query-string. + module EncodeMethods + # @param params [nil, Array, #to_hash] parameters to be encoded + # + # @return [String] the encoded params + # + # @raise [TypeError] if params can not be converted to a Hash + def encode(params) + return nil if params.nil? + + unless params.is_a?(Array) + unless params.respond_to?(:to_hash) + raise TypeError, "Can't convert #{params.class} into Hash." + end + + params = params.to_hash + params = params.map do |key, value| + key = key.to_s if key.is_a?(Symbol) + [key, value] + end + + # Only to be used for non-Array inputs. Arrays should preserve order. + params.sort! if @sort_params + end + + # The params have form [['key1', 'value1'], ['key2', 'value2']]. + buffer = +'' + params.each do |parent, value| + encoded_parent = escape(parent) + buffer << "#{encode_pair(encoded_parent, value)}&" + end + buffer.chop + end + + protected + + def encode_pair(parent, value) + if value.is_a?(Hash) + encode_hash(parent, value) + elsif value.is_a?(Array) + encode_array(parent, value) + elsif value.nil? + parent + else + encoded_value = escape(value) + "#{parent}=#{encoded_value}" + end + end + + def encode_hash(parent, value) + value = value.map { |key, val| [escape(key), val] }.sort + + buffer = +'' + value.each do |key, val| + new_parent = "#{parent}%5B#{key}%5D" + buffer << "#{encode_pair(new_parent, val)}&" + end + buffer.chop + end + + def encode_array(parent, value) + return "#{parent}%5B%5D" if value.empty? + + buffer = +'' + value.each_with_index do |val, index| + new_parent = if @array_indices + "#{parent}%5B#{index}%5D" + else + "#{parent}%5B%5D" + end + buffer << "#{encode_pair(new_parent, val)}&" + end + buffer.chop + end + end + + # Sub-module for decoding query-string into parameters. + module DecodeMethods + # @param query [nil, String] + # + # @return [Array] the decoded params + # + # @raise [TypeError] if the nesting is incorrect + def decode(query) + return nil if query.nil? + + params = {} + query.split('&').each do |pair| + next if pair.empty? 
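# Each pair is split into key/value and unescaped; decode_pair then expands
# bracketed keys (e.g. 'a[b][]=1') into nested Hashes/Arrays, and dehash
# converts hashes whose keys are all numeric into Arrays.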
+ + key, value = pair.split('=', 2) + key = unescape(key) + value = unescape(value.tr('+', ' ')) if value + decode_pair(key, value, params) + end + + dehash(params, 0) + end + + protected + + SUBKEYS_REGEX = /[^\[\]]+(?:\]?\[\])?/ + + def decode_pair(key, value, context) + subkeys = key.scan(SUBKEYS_REGEX) + subkeys.each_with_index do |subkey, i| + is_array = subkey =~ /[\[\]]+\Z/ + subkey = Regexp.last_match.pre_match if is_array + last_subkey = i == subkeys.length - 1 + + context = prepare_context(context, subkey, is_array, last_subkey) + add_to_context(is_array, context, value, subkey) if last_subkey + end + end + + def prepare_context(context, subkey, is_array, last_subkey) + if !last_subkey || is_array + context = new_context(subkey, is_array, context) + end + if context.is_a?(Array) && !is_array + context = match_context(context, subkey) + end + context + end + + def new_context(subkey, is_array, context) + value_type = is_array ? Array : Hash + if context[subkey] && !context[subkey].is_a?(value_type) + raise TypeError, "expected #{value_type.name} " \ + "(got #{context[subkey].class.name}) for param `#{subkey}'" + end + + context[subkey] ||= value_type.new + end + + def match_context(context, subkey) + context << {} if !context.last.is_a?(Hash) || context.last.key?(subkey) + context.last + end + + def add_to_context(is_array, context, value, subkey) + is_array ? context << value : context[subkey] = value + end + + # Internal: convert a nested hash with purely numeric keys into an array. + # FIXME: this is not compatible with Rack::Utils.parse_nested_query + # @!visibility private + def dehash(hash, depth) + hash.each do |key, value| + hash[key] = dehash(value, depth + 1) if value.is_a?(Hash) + end + + if depth.positive? && !hash.empty? && hash.keys.all? { |k| k =~ /^\d+$/ } + hash.sort.map(&:last) + else + hash + end + end + end + + # This is the default encoder for Faraday requests. + # Using this encoder, parameters will be encoded respecting their structure, + # so you can send objects such as Arrays or Hashes as parameters + # for your requests. + module NestedParamsEncoder + class << self + attr_accessor :sort_params, :array_indices + + extend Forwardable + def_delegators :'Faraday::Utils', :escape, :unescape + end + + # Useful default for OAuth and caching. + @sort_params = true + @array_indices = false + + extend EncodeMethods + extend DecodeMethods + end +end diff --git a/vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/lib/faraday/error.rb b/vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/lib/faraday/error.rb new file mode 100644 index 000000000..12ff15d77 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/lib/faraday/error.rb @@ -0,0 +1,199 @@ +# frozen_string_literal: true + +# Faraday namespace. +module Faraday + # Faraday error base class. + class Error < StandardError + attr_reader :response, :wrapped_exception + + def initialize(exc = nil, response = nil) + @wrapped_exception = nil unless defined?(@wrapped_exception) + @response = nil unless defined?(@response) + super(exc_msg_and_response!(exc, response)) + end + + def backtrace + if @wrapped_exception + @wrapped_exception.backtrace + else + super + end + end + + def inspect + inner = +'' + inner << " wrapped=#{@wrapped_exception.inspect}" if @wrapped_exception + inner << " response=#{@response.inspect}" if @response + inner << " #{super}" if inner.empty? + %(#<#{self.class}#{inner}>) + end + + def response_status + return unless @response + + @response.is_a?(Faraday::Response) ? 
@response.status : @response[:status] + end + + def response_headers + return unless @response + + @response.is_a?(Faraday::Response) ? @response.headers : @response[:headers] + end + + def response_body + return unless @response + + @response.is_a?(Faraday::Response) ? @response.body : @response[:body] + end + + protected + + # Pulls out potential parent exception and response hash, storing them in + # instance variables. + # exc - Either an Exception, a string message, or a response hash. + # response - Hash + # :status - Optional integer HTTP response status + # :headers - String key/value hash of HTTP response header + # values. + # :body - Optional string HTTP response body. + # :request - Hash + # :method - Symbol with the request HTTP method. + # :url - URI object with the url requested. + # :url_path - String with the url path requested. + # :params - String key/value hash of query params + # present in the request. + # :headers - String key/value hash of HTTP request + # header values. + # :body - String HTTP request body. + # + # If a subclass has to call this, then it should pass a string message + # to `super`. See NilStatusError. + def exc_msg_and_response!(exc, response = nil) + if @response.nil? && @wrapped_exception.nil? + @wrapped_exception, msg, @response = exc_msg_and_response(exc, response) + return msg + end + + exc.to_s + end + + # Pulls out potential parent exception and response hash. + def exc_msg_and_response(exc, response = nil) + case exc + when Exception + [exc, exc.message, response] + when Hash + [nil, build_error_message_from_hash(exc), exc] + when Faraday::Env + [nil, build_error_message_from_env(exc), exc] + else + [nil, exc.to_s, response] + end + end + + private + + def build_error_message_from_hash(hash) + # Be defensive with external Hash objects - they might be missing keys + status = hash.fetch(:status, nil) + request = hash.fetch(:request, nil) + + return fallback_error_message(status) if request.nil? + + method = request.fetch(:method, nil) + url = request.fetch(:url, nil) + build_status_error_message(status, method, url) + end + + def build_error_message_from_env(env) + # Faraday::Env is internal - we can make reasonable assumptions about its structure + build_status_error_message(env.status, env.method, env.url) + end + + def build_status_error_message(status, method, url) + method_str = method ? method.to_s.upcase : '' + url_str = url ? url.to_s : '' + "the server responded with status #{status} for #{method_str} #{url_str}" + end + + def fallback_error_message(status) + "the server responded with status #{status} - method and url are not available " \ + 'due to include_request: false on Faraday::Response::RaiseError middleware' + end + end + + # Faraday client error class. Represents 4xx status responses. + class ClientError < Error + end + + # Raised by Faraday::Response::RaiseError in case of a 400 response. + class BadRequestError < ClientError + end + + # Raised by Faraday::Response::RaiseError in case of a 401 response. + class UnauthorizedError < ClientError + end + + # Raised by Faraday::Response::RaiseError in case of a 403 response. + class ForbiddenError < ClientError + end + + # Raised by Faraday::Response::RaiseError in case of a 404 response. + class ResourceNotFound < ClientError + end + + # Raised by Faraday::Response::RaiseError in case of a 407 response. + class ProxyAuthError < ClientError + end + + # Raised by Faraday::Response::RaiseError in case of a 408 response. 
+ class RequestTimeoutError < ClientError + end + + # Raised by Faraday::Response::RaiseError in case of a 409 response. + class ConflictError < ClientError + end + + # Raised by Faraday::Response::RaiseError in case of a 422 response. + class UnprocessableEntityError < ClientError + end + + # Raised by Faraday::Response::RaiseError in case of a 429 response. + class TooManyRequestsError < ClientError + end + + # Faraday server error class. Represents 5xx status responses. + class ServerError < Error + end + + # A unified client error for timeouts. + class TimeoutError < ServerError + def initialize(exc = 'timeout', response = nil) + super(exc, response) + end + end + + # Raised by Faraday::Response::RaiseError in case of a nil status in response. + class NilStatusError < ServerError + def initialize(exc, response = nil) + exc_msg_and_response!(exc, response) + super('http status could not be derived from the server response') + end + end + + # A unified error for failed connections. + class ConnectionFailed < Error + end + + # A unified client error for SSL errors. + class SSLError < Error + end + + # Raised by middlewares that parse the response, like the JSON response middleware. + class ParsingError < Error + end + + # Raised by Faraday::Middleware and subclasses when invalid default_options are used + class InitializationError < Error + end +end diff --git a/vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/lib/faraday/logging/formatter.rb b/vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/lib/faraday/logging/formatter.rb new file mode 100644 index 000000000..2fd4bb1da --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/lib/faraday/logging/formatter.rb @@ -0,0 +1,118 @@ +# frozen_string_literal: true + +require 'pp' # This require is necessary for Hash#pretty_inspect to work, do not remove it, people rely on it. + +module Faraday + module Logging + # Serves as an integration point to customize logging + class Formatter + extend Forwardable + + DEFAULT_OPTIONS = { headers: true, bodies: false, errors: false, + log_level: :info }.freeze + + def initialize(logger:, options:) + @logger = logger + @options = DEFAULT_OPTIONS.merge(options) + unless %i[debug info warn error fatal].include?(@options[:log_level]) + @options[:log_level] = :info + end + @filter = [] + end + + def_delegators :@logger, :debug, :info, :warn, :error, :fatal + + def request(env) + public_send(log_level) do + "request: #{env.method.upcase} #{apply_filters(env.url.to_s)}" + end + + log_headers('request', env.request_headers) if log_headers?(:request) + log_body('request', env[:body]) if env[:body] && log_body?(:request) + end + + def response(env) + public_send(log_level) { "response: Status #{env.status}" } + + log_headers('response', env.response_headers) if log_headers?(:response) + log_body('response', env[:body]) if env[:body] && log_body?(:response) + end + + def exception(exc) + return unless log_errors? + + public_send(log_level) { "error: #{exc.full_message}" } + + log_headers('error', exc.response_headers) if exc.respond_to?(:response_headers) && log_headers?(:error) + return unless exc.respond_to?(:response_body) && exc.response_body && log_body?(:error) + + log_body('error', exc.response_body) + end + + def filter(filter_word, filter_replacement) + @filter.push([filter_word, filter_replacement]) + end + + private + + def dump_headers(headers) + return if headers.nil? 
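# Render one 'Name: value' line per header (values are #inspect-ed); the
# caller applies the configured filters to the resulting string.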
+ + headers.map { |k, v| "#{k}: #{v.inspect}" }.join("\n") + end + + def dump_body(body) + if body.respond_to?(:to_str) + body.to_str + else + pretty_inspect(body) + end + end + + def pretty_inspect(body) + body.pretty_inspect + end + + def log_headers?(type) + case @options[:headers] + when Hash + @options[:headers][type] + else + @options[:headers] + end + end + + def log_body?(type) + case @options[:bodies] + when Hash + @options[:bodies][type] + else + @options[:bodies] + end + end + + def log_errors? + @options[:errors] + end + + def apply_filters(output) + @filter.each do |pattern, replacement| + output = output.to_s.gsub(pattern, replacement) + end + output + end + + def log_level + @options[:log_level] + end + + def log_headers(type, headers) + public_send(log_level) { "#{type}: #{apply_filters(dump_headers(headers))}" } + end + + def log_body(type, body) + public_send(log_level) { "#{type}: #{apply_filters(dump_body(body))}" } + end + end + end +end diff --git a/vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/lib/faraday/methods.rb b/vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/lib/faraday/methods.rb new file mode 100644 index 000000000..53e390379 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/lib/faraday/methods.rb @@ -0,0 +1,6 @@ +# frozen_string_literal: true + +module Faraday + METHODS_WITH_QUERY = %w[get head delete trace].freeze + METHODS_WITH_BODY = %w[post put patch].freeze +end diff --git a/vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/lib/faraday/middleware.rb b/vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/lib/faraday/middleware.rb new file mode 100644 index 000000000..52d8287bc --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/lib/faraday/middleware.rb @@ -0,0 +1,72 @@ +# frozen_string_literal: true + +require 'monitor' + +module Faraday + # Middleware is the basic base class of any Faraday middleware. + class Middleware + extend MiddlewareRegistry + + attr_reader :app, :options + + DEFAULT_OPTIONS = {}.freeze + LOCK = Mutex.new + + def initialize(app = nil, options = {}) + @app = app + @options = self.class.default_options.merge(options) + end + + class << self + # Faraday::Middleware::default_options= allows user to set default options at the Faraday::Middleware + # class level. + # + # @example Set the Faraday::Response::RaiseError option, `include_request` to `false` + # my_app/config/initializers/my_faraday_middleware.rb + # + # Faraday::Response::RaiseError.default_options = { include_request: false } + # + def default_options=(options = {}) + validate_default_options(options) + LOCK.synchronize do + @default_options = default_options.merge(options) + end + end + + # default_options attr_reader that initializes class instance variable + # with the values of any Faraday::Middleware defaults, and merges with + # subclass defaults + def default_options + @default_options ||= DEFAULT_OPTIONS.merge(self::DEFAULT_OPTIONS) + end + + private + + def validate_default_options(options) + invalid_keys = options.keys.reject { |opt| self::DEFAULT_OPTIONS.key?(opt) } + return unless invalid_keys.any? + + raise(Faraday::InitializationError, + "Invalid options provided. 
Keys not found in #{self}::DEFAULT_OPTIONS: #{invalid_keys.join(', ')}") + end + end + + def call(env) + on_request(env) if respond_to?(:on_request) + app.call(env).on_complete do |environment| + on_complete(environment) if respond_to?(:on_complete) + end + rescue StandardError => e + on_error(e) if respond_to?(:on_error) + raise + end + + def close + if app.respond_to?(:close) + app.close + else + warn "#{app} does not implement \#close!" + end + end + end +end diff --git a/vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/lib/faraday/middleware_registry.rb b/vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/lib/faraday/middleware_registry.rb new file mode 100644 index 000000000..fc70e2b87 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/lib/faraday/middleware_registry.rb @@ -0,0 +1,83 @@ +# frozen_string_literal: true + +require 'monitor' + +module Faraday + # Adds the ability for other modules to register and lookup + # middleware classes. + module MiddlewareRegistry + def registered_middleware + @registered_middleware ||= {} + end + + # Register middleware class(es) on the current module. + # + # @param mappings [Hash] Middleware mappings from a lookup symbol to a middleware class. + # @return [void] + # + # @example Lookup by a constant + # + # module Faraday + # class Whatever < Middleware + # # Middleware looked up by :foo returns Faraday::Whatever::Foo. + # register_middleware(foo: Whatever) + # end + # end + def register_middleware(**mappings) + middleware_mutex do + registered_middleware.update(mappings) + end + end + + # Unregister a previously registered middleware class. + # + # @param key [Symbol] key for the registered middleware. + def unregister_middleware(key) + registered_middleware.delete(key) + end + + # Lookup middleware class with a registered Symbol shortcut. + # + # @param key [Symbol] key for the registered middleware. + # @return [Class] a middleware Class. + # @raise [Faraday::Error] if given key is not registered + # + # @example + # + # module Faraday + # class Whatever < Middleware + # register_middleware(foo: Whatever) + # end + # end + # + # Faraday::Middleware.lookup_middleware(:foo) + # # => Faraday::Whatever + def lookup_middleware(key) + load_middleware(key) || + raise(Faraday::Error, "#{key.inspect} is not registered on #{self}") + end + + private + + def middleware_mutex(&block) + @middleware_mutex ||= Monitor.new + @middleware_mutex.synchronize(&block) + end + + def load_middleware(key) + value = registered_middleware[key] + case value + when Module + value + when Symbol, String + middleware_mutex do + @registered_middleware[key] = const_get(value) + end + when Proc + middleware_mutex do + @registered_middleware[key] = value.call + end + end + end + end +end diff --git a/vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/lib/faraday/options.rb b/vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/lib/faraday/options.rb new file mode 100644 index 000000000..b3e0dea1a --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/lib/faraday/options.rb @@ -0,0 +1,219 @@ +# frozen_string_literal: true + +module Faraday + # Subclasses Struct with some special helpers for converting from a Hash to + # a Struct. + class Options < Struct + # Public + def self.from(value) + value ? new.update(value) : new + end + + # Public + def each + return to_enum(:each) unless block_given? 
+ + members.each do |key| + yield(key.to_sym, send(key)) + end + end + + # Public + def update(obj) + obj.each do |key, value| + sub_options = self.class.options_for(key) + if sub_options + new_value = sub_options.from(value) if value + elsif value.is_a?(Hash) + new_value = value.dup + else + new_value = value + end + + send(:"#{key}=", new_value) unless new_value.nil? + end + self + end + + # Public + def delete(key) + value = send(key) + send(:"#{key}=", nil) + value + end + + # Public + def clear + members.each { |member| delete(member) } + end + + # Public + def merge!(other) + other.each do |key, other_value| + self_value = send(key) + sub_options = self.class.options_for(key) + new_value = if self_value && sub_options && other_value + self_value.merge(other_value) + else + other_value + end + send(:"#{key}=", new_value) unless new_value.nil? + end + self + end + + # Public + def merge(other) + dup.merge!(other) + end + + # Public + def deep_dup + self.class.from(self) + end + + # Public + def fetch(key, *args) + unless symbolized_key_set.include?(key.to_sym) + key_setter = "#{key}=" + if !args.empty? + send(key_setter, args.first) + elsif block_given? + send(key_setter, yield(key)) + else + raise self.class.fetch_error_class, "key not found: #{key.inspect}" + end + end + send(key) + end + + # Public + def values_at(*keys) + keys.map { |key| send(key) } + end + + # Public + def keys + members.reject { |member| send(member).nil? } + end + + # Public + def empty? + keys.empty? + end + + # Public + def each_key(&block) + return to_enum(:each_key) unless block + + keys.each(&block) + end + + # Public + def key?(key) + keys.include?(key) + end + + alias has_key? key? + + # Public + def each_value(&block) + return to_enum(:each_value) unless block + + values.each(&block) + end + + # Public + def value?(value) + values.include?(value) + end + + alias has_value? value? + + # Public + def to_hash + hash = {} + members.each do |key| + value = send(key) + hash[key.to_sym] = value unless value.nil? + end + hash + end + + # Internal + def inspect + values = [] + members.each do |member| + value = send(member) + values << "#{member}=#{value.inspect}" if value + end + values = values.empty? ? 
'(empty)' : values.join(', ') + + %(#<#{self.class} #{values}>) + end + + # Internal + def self.options(mapping) + attribute_options.update(mapping) + end + + # Internal + def self.options_for(key) + attribute_options[key] + end + + # Internal + def self.attribute_options + @attribute_options ||= {} + end + + def self.memoized(key, &block) + unless block + raise ArgumentError, '#memoized must be called with a block' + end + + memoized_attributes[key.to_sym] = block + class_eval <<-RUBY, __FILE__, __LINE__ + 1 + remove_method(key) if method_defined?(key, false) + def #{key}() self[:#{key}]; end + RUBY + end + + def self.memoized_attributes + @memoized_attributes ||= {} + end + + def [](key) + key = key.to_sym + if (method = self.class.memoized_attributes[key]) + super(key) || (self[key] = instance_eval(&method)) + else + super + end + end + + def symbolized_key_set + @symbolized_key_set ||= Set.new(keys.map(&:to_sym)) + end + + def self.inherited(subclass) + super + subclass.attribute_options.update(attribute_options) + subclass.memoized_attributes.update(memoized_attributes) + end + + def self.fetch_error_class + @fetch_error_class ||= if Object.const_defined?(:KeyError) + ::KeyError + else + ::IndexError + end + end + end +end + +require 'faraday/options/request_options' +require 'faraday/options/ssl_options' +require 'faraday/options/proxy_options' +require 'faraday/options/connection_options' +require 'faraday/options/env' diff --git a/vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/lib/faraday/options/connection_options.rb b/vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/lib/faraday/options/connection_options.rb new file mode 100644 index 000000000..0698940b4 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/lib/faraday/options/connection_options.rb @@ -0,0 +1,23 @@ +# frozen_string_literal: true + +module Faraday + # @!parse + # # ConnectionOptions contains the configurable properties for a Faraday + # # connection object. + # class ConnectionOptions < Options; end + ConnectionOptions = Options.new(:request, :proxy, :ssl, :builder, :url, + :parallel_manager, :params, :headers, + :builder_class) do + options request: RequestOptions, ssl: SSLOptions + + memoized(:request) { self.class.options_for(:request).new } + + memoized(:ssl) { self.class.options_for(:ssl).new } + + memoized(:builder_class) { RackBuilder } + + def new_builder(block) + builder_class.new(&block) + end + end +end diff --git a/vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/lib/faraday/options/env.rb b/vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/lib/faraday/options/env.rb new file mode 100644 index 000000000..63d814bb6 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/lib/faraday/options/env.rb @@ -0,0 +1,204 @@ +# frozen_string_literal: true + +module Faraday + # @!parse + # # @!attribute method + # # @return [Symbol] HTTP method (`:get`, `:post`) + # # + # # @!attribute body + # # @return [String] The request body that will eventually be converted to a + # # string. + # # + # # @!attribute url + # # @return [URI] URI instance for the current request. + # # + # # @!attribute request + # # @return [Hash] options for configuring the request. + # # Options for configuring the request. + # # + # # - `:timeout` - time limit for the entire request (Integer in + # # seconds) + # # - `:open_timeout` - time limit for just the connection phase (e.g. 
+ # # handshake) (Integer in seconds) + # # - `:read_timeout` - time limit for the first response byte received from + # # the server (Integer in seconds) + # # - `:write_timeout` - time limit for the client to send the request to the + # # server (Integer in seconds) + # # - `:on_data` - Proc for streaming + # # - `:proxy` - Hash of proxy options + # # - `:uri` - Proxy server URI + # # - `:user` - Proxy server username + # # - `:password` - Proxy server password + # # + # # @!attribute request_headers + # # @return [Hash] HTTP Headers to be sent to the server. + # # + # # @!attribute ssl + # # @return [Hash] options for configuring SSL requests + # # + # # @!attribute parallel_manager + # # @return [Object] sent if the connection is in parallel mode + # # + # # @!attribute params + # # @return [Hash] + # # + # # @!attribute response + # # @return [Response] + # # + # # @!attribute response_headers + # # @return [Hash] HTTP headers from the server + # # + # # @!attribute status + # # @return [Integer] HTTP response status code + # # + # # @!attribute reason_phrase + # # @return [String] + # class Env < Options; end + Env = Options.new(:method, :request_body, :url, :request, + :request_headers, :ssl, :parallel_manager, :params, + :response, :response_headers, :status, + :reason_phrase, :response_body) do + const_set(:ContentLength, 'Content-Length') + const_set(:StatusesWithoutBody, Set.new([204, 304])) + const_set(:SuccessfulStatuses, 200..299) + + # A Set of HTTP verbs that typically send a body. If no body is set for + # these requests, the Content-Length header is set to 0. + const_set(:MethodsWithBodies, Set.new(Faraday::METHODS_WITH_BODY.map(&:to_sym))) + + options request: RequestOptions, + request_headers: Utils::Headers, response_headers: Utils::Headers + + extend Forwardable + + def_delegators :request, :params_encoder + + # Build a new Env from given value. Respects and updates `custom_members`. + # + # @param value [Object] a value fitting Option.from(v). + # @return [Env] from given value + def self.from(value) + env = super(value) + if value.respond_to?(:custom_members) + env.custom_members.update(value.custom_members) + end + env + end + + # @param key [Object] + def [](key) + return self[current_body] if key == :body + + if in_member_set?(key) + super(key) + else + custom_members[key] + end + end + + # @param key [Object] + # @param value [Object] + def []=(key, value) + if key == :body + super(current_body, value) + return + end + + if in_member_set?(key) + super(key, value) + else + custom_members[key] = value + end + end + + def current_body + !!status ? :response_body : :request_body + end + + def body + self[:body] + end + + def body=(value) + self[:body] = value + end + + # @return [Boolean] true if status is in the set of {SuccessfulStatuses}. + def success? + Env::SuccessfulStatuses.include?(status) + end + + # @return [Boolean] true if there's no body yet, and the method is in the + # set of {Env::MethodsWithBodies}. + def needs_body? + !body && Env::MethodsWithBodies.include?(method) + end + + # Sets content length to zero and the body to the empty string. + def clear_body + request_headers[Env::ContentLength] = '0' + self.body = +'' + end + + # @return [Boolean] true if the status isn't in the set of + # {Env::StatusesWithoutBody}. + def parse_body? + !Env::StatusesWithoutBody.include?(status) + end + + # @return [Boolean] true if there is a parallel_manager + def parallel? 
+ !!parallel_manager + end + + def inspect + attrs = [nil] + members.each do |mem| + if (value = send(mem)) + attrs << "@#{mem}=#{value.inspect}" + end + end + attrs << "@custom=#{custom_members.inspect}" unless custom_members.empty? + %(#<#{self.class}#{attrs.join(' ')}>) + end + + def stream_response? + request.stream_response? + end + + def stream_response(&block) + size = 0 + yielded = false + block_result = block.call do |chunk| + if chunk.bytesize.positive? || size.positive? + yielded = true + size += chunk.bytesize + request.on_data.call(chunk, size, self) + end + end + request.on_data.call(+'', 0, self) unless yielded + block_result + end + + # @private + def custom_members + @custom_members ||= {} + end + + # @private + if members.first.is_a?(Symbol) + def in_member_set?(key) + self.class.member_set.include?(key.to_sym) + end + else + def in_member_set?(key) + self.class.member_set.include?(key.to_s) + end + end + + # @private + def self.member_set + @member_set ||= Set.new(members) + end + end +end diff --git a/vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/lib/faraday/options/proxy_options.rb b/vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/lib/faraday/options/proxy_options.rb new file mode 100644 index 000000000..028f3d2ab --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/lib/faraday/options/proxy_options.rb @@ -0,0 +1,38 @@ +# frozen_string_literal: true + +module Faraday + # @!parse + # # ProxyOptions contains the configurable properties for the proxy + # # configuration used when making an HTTP request. + # class ProxyOptions < Options; end + ProxyOptions = Options.new(:uri, :user, :password) do + extend Forwardable + def_delegators :uri, :scheme, :scheme=, :host, :host=, :port, :port=, + :path, :path= + + def self.from(value) + case value + when '' + value = nil + when String + # URIs without a scheme should default to http (like 'example:123'). + # This fixes #1282 and prevents a silent failure in some adapters. + value = "http://#{value}" unless value.include?('://') + value = { uri: Utils.URI(value) } + when URI + value = { uri: value } + when Hash, Options + if value[:uri] + value = value.dup.tap do |duped| + duped[:uri] = Utils.URI(duped[:uri]) + end + end + end + + super(value) + end + + memoized(:user) { uri&.user && Utils.unescape(uri.user) } + memoized(:password) { uri&.password && Utils.unescape(uri.password) } + end +end diff --git a/vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/lib/faraday/options/request_options.rb b/vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/lib/faraday/options/request_options.rb new file mode 100644 index 000000000..3bb67c8e4 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/lib/faraday/options/request_options.rb @@ -0,0 +1,23 @@ +# frozen_string_literal: true + +module Faraday + # @!parse + # # RequestOptions contains the configurable properties for a Faraday request. + # class RequestOptions < Options; end + RequestOptions = Options.new(:params_encoder, :proxy, :bind, + :timeout, :open_timeout, :read_timeout, + :write_timeout, :boundary, :oauth, + :context, :on_data) do + def []=(key, value) + if key && key.to_sym == :proxy + super(key, value ? ProxyOptions.from(value) : nil) + else + super(key, value) + end + end + + def stream_response? 
+ on_data.is_a?(Proc) + end + end +end diff --git a/vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/lib/faraday/options/ssl_options.rb b/vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/lib/faraday/options/ssl_options.rb new file mode 100644 index 000000000..71eef15c5 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/lib/faraday/options/ssl_options.rb @@ -0,0 +1,76 @@ +# frozen_string_literal: true + +module Faraday + # @!parse + # # SSL-related options. + # # + # # @!attribute verify + # # @return [Boolean] whether to verify SSL certificates or not + # # + # # @!attribute verify_hostname + # # @return [Boolean] whether to enable hostname verification on server certificates + # # during the handshake or not (see https://github.com/ruby/openssl/pull/60) + # # + # # @!attribute hostname + # # @return [String] Server hostname used for SNI (see https://ruby-doc.org/stdlib-2.5.1/libdoc/openssl/rdoc/OpenSSL/SSL/SSLSocket.html#method-i-hostname-3D) + # # + # # @!attribute ca_file + # # @return [String] CA file + # # + # # @!attribute ca_path + # # @return [String] CA path + # # + # # @!attribute verify_mode + # # @return [Integer] Any `OpenSSL::SSL::` constant (see https://ruby-doc.org/stdlib-2.5.1/libdoc/openssl/rdoc/OpenSSL/SSL.html) + # # + # # @!attribute cert_store + # # @return [OpenSSL::X509::Store] certificate store + # # + # # @!attribute client_cert + # # @return [String, OpenSSL::X509::Certificate] client certificate + # # + # # @!attribute client_key + # # @return [String, OpenSSL::PKey::RSA, OpenSSL::PKey::DSA] client key + # # + # # @!attribute certificate + # # @return [OpenSSL::X509::Certificate] certificate (Excon only) + # # + # # @!attribute private_key + # # @return [OpenSSL::PKey::RSA, OpenSSL::PKey::DSA] private key (Excon only) + # # + # # @!attribute verify_depth + # # @return [Integer] maximum depth for the certificate chain verification + # # + # # @!attribute version + # # @return [String, Symbol] SSL version (see https://ruby-doc.org/stdlib-2.5.1/libdoc/openssl/rdoc/OpenSSL/SSL/SSLContext.html#method-i-ssl_version-3D) + # # + # # @!attribute min_version + # # @return [String, Symbol] minimum SSL version (see https://ruby-doc.org/stdlib-2.5.1/libdoc/openssl/rdoc/OpenSSL/SSL/SSLContext.html#method-i-min_version-3D) + # # + # # @!attribute max_version + # # @return [String, Symbol] maximum SSL version (see https://ruby-doc.org/stdlib-2.5.1/libdoc/openssl/rdoc/OpenSSL/SSL/SSLContext.html#method-i-max_version-3D) + # # + # # @!attribute ciphers + # # @return [String] cipher list in OpenSSL format (see https://ruby-doc.org/stdlib-2.5.1/libdoc/openssl/rdoc/OpenSSL/SSL/SSLContext.html#method-i-ciphers-3D) + # class SSLOptions < Options; end + SSLOptions = Options.new(:verify, :verify_hostname, :hostname, + :ca_file, :ca_path, :verify_mode, + :cert_store, :client_cert, :client_key, + :certificate, :private_key, :verify_depth, + :version, :min_version, :max_version, :ciphers) do + # @return [Boolean] true if should verify + def verify? + verify != false + end + + # @return [Boolean] true if should not verify + def disable? + !verify? + end + + # @return [Boolean] true if should verify_hostname + def verify_hostname? 
+ verify_hostname != false + end + end +end diff --git a/vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/lib/faraday/parameters.rb b/vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/lib/faraday/parameters.rb new file mode 100644 index 000000000..cfb35d085 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/lib/faraday/parameters.rb @@ -0,0 +1,5 @@ +# frozen_string_literal: true + +require 'forwardable' +require 'faraday/encoders/nested_params_encoder' +require 'faraday/encoders/flat_params_encoder' diff --git a/vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/lib/faraday/rack_builder.rb b/vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/lib/faraday/rack_builder.rb new file mode 100644 index 000000000..1d86d353c --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/lib/faraday/rack_builder.rb @@ -0,0 +1,248 @@ +# frozen_string_literal: true + +require 'faraday/adapter_registry' + +module Faraday + # A Builder that processes requests into responses by passing through an inner + # middleware stack (heavily inspired by Rack). + # + # @example + # Faraday::Connection.new(url: 'http://httpbingo.org') do |builder| + # builder.request :url_encoded # Faraday::Request::UrlEncoded + # builder.adapter :net_http # Faraday::Adapter::NetHttp + # end + class RackBuilder + # Used to detect missing arguments + NO_ARGUMENT = Object.new + + attr_accessor :handlers + + # Error raised when trying to modify the stack after calling `lock!` + class StackLocked < RuntimeError; end + + # borrowed from ActiveSupport::Dependencies::Reference & + # ActionDispatch::MiddlewareStack::Middleware + class Handler + REGISTRY = Faraday::AdapterRegistry.new + + attr_reader :name + + def initialize(klass, *args, **kwargs, &block) + @name = klass.to_s + REGISTRY.set(klass) if klass.respond_to?(:name) + @args = args + @kwargs = kwargs + @block = block + end + + def klass + REGISTRY.get(@name) + end + + def inspect + @name + end + + def ==(other) + if other.is_a? Handler + name == other.name + elsif other.respond_to? :name + klass == other + else + @name == other.to_s + end + end + + def build(app = nil) + klass.new(app, *@args, **@kwargs, &@block) + end + end + + def initialize(&block) + @adapter = nil + @handlers = [] + build(&block) + end + + def initialize_dup(original) + super + @adapter = original.adapter + @handlers = original.handlers.dup + end + + def build + raise_if_locked + block_given? ? yield(self) : request(:url_encoded) + adapter(Faraday.default_adapter, **Faraday.default_adapter_options) unless @adapter + end + + def [](idx) + @handlers[idx] + end + + # Locks the middleware stack to ensure no further modifications are made. + def lock! + @handlers.freeze + end + + def locked? + @handlers.frozen? + end + + def use(klass, ...) + if klass.is_a? Symbol + use_symbol(Faraday::Middleware, klass, ...) + else + raise_if_locked + raise_if_adapter(klass) + @handlers << self.class::Handler.new(klass, ...) + end + end + + def request(key, ...) + use_symbol(Faraday::Request, key, ...) + end + + def response(...) + use_symbol(Faraday::Response, ...) + end + + def adapter(klass = NO_ARGUMENT, *args, **kwargs, &block) + return @adapter if klass == NO_ARGUMENT || klass.nil? + + klass = Faraday::Adapter.lookup_middleware(klass) if klass.is_a?(Symbol) + @adapter = self.class::Handler.new(klass, *args, **kwargs, &block) + end + + ## methods to push onto the various positions in the stack: + + def insert(index, ...) + raise_if_locked + index = assert_index(index) + handler = self.class::Handler.new(...) 
+ @handlers.insert(index, handler) + end + + alias insert_before insert + + def insert_after(index, ...) + index = assert_index(index) + insert(index + 1, ...) + end + + def swap(index, ...) + raise_if_locked + index = assert_index(index) + @handlers.delete_at(index) + insert(index, ...) + end + + def delete(handler) + raise_if_locked + @handlers.delete(handler) + end + + # Processes a Request into a Response by passing it through this Builder's + # middleware stack. + # + # @param connection [Faraday::Connection] + # @param request [Faraday::Request] + # + # @return [Faraday::Response] + def build_response(connection, request) + app.call(build_env(connection, request)) + end + + # The "rack app" wrapped in middleware. All requests are sent here. + # + # The builder is responsible for creating the app object. After this, + # the builder gets locked to ensure no further modifications are made + # to the middleware stack. + # + # Returns an object that responds to `call` and returns a Response. + def app + @app ||= begin + lock! + ensure_adapter! + to_app + end + end + + def to_app + # last added handler is the deepest and thus closest to the inner app + # adapter is always the last one + @handlers.reverse.inject(@adapter.build) do |app, handler| + handler.build(app) + end + end + + def ==(other) + other.is_a?(self.class) && + @handlers == other.handlers && + @adapter == other.adapter + end + + # ENV Keys + # :http_method - a symbolized request HTTP method (:get, :post) + # :body - the request body that will eventually be converted to a string. + # :url - URI instance for the current request. + # :status - HTTP response status code + # :request_headers - hash of HTTP Headers to be sent to the server + # :response_headers - Hash of HTTP headers from the server + # :parallel_manager - sent if the connection is in parallel mode + # :request - Hash of options for configuring the request. + # :timeout - open/read timeout Integer in seconds + # :open_timeout - read timeout Integer in seconds + # :proxy - Hash of proxy options + # :uri - Proxy Server URI + # :user - Proxy server username + # :password - Proxy server password + # :ssl - Hash of options for configuring SSL requests. + def build_env(connection, request) + exclusive_url = connection.build_exclusive_url( + request.path, request.params, + request.options.params_encoder + ) + + Env.new(request.http_method, request.body, exclusive_url, + request.options, request.headers, connection.ssl, + connection.parallel_manager) + end + + private + + LOCK_ERR = "can't modify middleware stack after making a request" + MISSING_ADAPTER_ERROR = "An attempt to run a request with a Faraday::Connection without adapter has been made.\n" \ + "Please set Faraday.default_adapter or provide one when initializing the connection.\n" \ + 'For more info, check https://lostisland.github.io/faraday/usage/.' + + def raise_if_locked + raise StackLocked, LOCK_ERR if locked? + end + + def raise_if_adapter(klass) + return unless klass <= Faraday::Adapter + + raise 'Adapter should be set using the `adapter` method, not `use`' + end + + def ensure_adapter! + raise MISSING_ADAPTER_ERROR unless @adapter + end + + def adapter_set? + !@adapter.nil? + end + + def use_symbol(mod, key, ...) + use(mod.lookup_middleware(key), ...) + end + + def assert_index(index) + idx = index.is_a?(Integer) ? 
index : @handlers.index(index) + raise "No such handler: #{index.inspect}" unless idx + + idx + end + end +end diff --git a/vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/lib/faraday/request.rb b/vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/lib/faraday/request.rb new file mode 100644 index 000000000..8c5bf9525 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/lib/faraday/request.rb @@ -0,0 +1,139 @@ +# frozen_string_literal: true + +module Faraday + # Used to setup URLs, params, headers, and the request body in a sane manner. + # + # @example + # @connection.post do |req| + # req.url 'http://localhost', 'a' => '1' # 'http://localhost?a=1' + # req.headers['b'] = '2' # Header + # req.params['c'] = '3' # GET Param + # req['b'] = '2' # also Header + # req.body = 'abc' + # end + # + # @!attribute http_method + # @return [Symbol] the HTTP method of the Request + # @!attribute path + # @return [URI, String] the path + # @!attribute params + # @return [Hash] query parameters + # @!attribute headers + # @return [Faraday::Utils::Headers] headers + # @!attribute body + # @return [String] body + # @!attribute options + # @return [RequestOptions] options + Request = Struct.new(:http_method, :path, :params, :headers, :body, :options) do + extend MiddlewareRegistry + + alias_method :member_get, :[] + private :member_get + alias_method :member_set, :[]= + private :member_set + + # @param request_method [String] + # @yield [request] for block customization, if block given + # @yieldparam request [Request] + # @return [Request] + def self.create(request_method) + new(request_method).tap do |request| + yield(request) if block_given? + end + end + + remove_method :params= + # Replace params, preserving the existing hash type. + # + # @param hash [Hash] new params + def params=(hash) + if params + params.replace hash + else + member_set(:params, hash) + end + end + + remove_method :headers= + # Replace request headers, preserving the existing hash type. + # + # @param hash [Hash] new headers + def headers=(hash) + if headers + headers.replace hash + else + member_set(:headers, hash) + end + end + + # Update path and params. + # + # @param path [URI, String] + # @param params [Hash, nil] + # @return [void] + def url(path, params = nil) + if path.respond_to? :query + if (query = path.query) + path = path.dup + path.query = nil + end + else + anchor_index = path.index('#') + path = path.slice(0, anchor_index) unless anchor_index.nil? + path, query = path.split('?', 2) + end + self.path = path + self.params.merge_query query, options.params_encoder + self.params.update(params) if params + end + + # @param key [Object] key to look up in headers + # @return [Object] value of the given header name + def [](key) + headers[key] + end + + # @param key [Object] key of header to write + # @param value [Object] value of header + def []=(key, value) + headers[key] = value + end + + # Marshal serialization support. + # + # @return [Hash] the hash ready to be serialized in Marshal. + def marshal_dump + { + http_method: http_method, + body: body, + headers: headers, + path: path, + params: params, + options: options + } + end + + # Marshal serialization support. + # Restores the instance variables according to the +serialised+. + # @param serialised [Hash] the serialised object. 
+ def marshal_load(serialised) + self.http_method = serialised[:http_method] + self.body = serialised[:body] + self.headers = serialised[:headers] + self.path = serialised[:path] + self.params = serialised[:params] + self.options = serialised[:options] + end + + # @return [Env] the Env for this Request + def to_env(connection) + Env.new(http_method, body, connection.build_exclusive_url(path, params), + options, headers, connection.ssl, connection.parallel_manager) + end + end +end + +require 'faraday/request/authorization' +require 'faraday/request/instrumentation' +require 'faraday/request/json' +require 'faraday/request/url_encoded' diff --git a/vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/lib/faraday/request/authorization.rb b/vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/lib/faraday/request/authorization.rb new file mode 100644 index 000000000..43732432d --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/lib/faraday/request/authorization.rb @@ -0,0 +1,54 @@ +# frozen_string_literal: true + +module Faraday + class Request + # Request middleware for the Authorization HTTP header + class Authorization < Faraday::Middleware + KEY = 'Authorization' + + # @param app [#call] + # @param type [String, Symbol] Type of Authorization + # @param params [Array] parameters to build the Authorization header. + # If the type is `:basic`, then these can be a login and password pair. + # Otherwise, a single value is expected that will be appended after the type. + # This value can be a proc or an object responding to `.call`, in which case + # it will be invoked on each request. + def initialize(app, type, *params) + @type = type + @params = params + super(app) + end + + # @param env [Faraday::Env] + def on_request(env) + return if env.request_headers[KEY] + + env.request_headers[KEY] = header_from(@type, env, *@params) + end + + private + + # @param type [String, Symbol] + # @param env [Faraday::Env] + # @param params [Array] + # @return [String] a header value + def header_from(type, env, *params) + if type.to_s.casecmp('basic').zero? && params.size == 2 + Utils.basic_header_from(*params) + elsif params.size != 1 + raise ArgumentError, "Unexpected params received (got #{params.size} instead of 1)" + else + value = params.first + if (value.is_a?(Proc) && value.arity == 1) || (value.respond_to?(:call) && value.method(:call).arity == 1) + value = value.call(env) + elsif value.is_a?(Proc) || value.respond_to?(:call) + value = value.call + end + "#{type} #{value}" + end + end + end + end +end + +Faraday::Request.register_middleware(authorization: Faraday::Request::Authorization) diff --git a/vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/lib/faraday/request/instrumentation.rb b/vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/lib/faraday/request/instrumentation.rb new file mode 100644 index 000000000..a5020598d --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/lib/faraday/request/instrumentation.rb @@ -0,0 +1,58 @@ +# frozen_string_literal: true + +module Faraday + class Request + # Middleware for instrumenting Requests. + class Instrumentation < Faraday::Middleware + # Options class used in Request::Instrumentation class. + Options = Faraday::Options.new(:name, :instrumenter) do + remove_method :name + # @return [String] + def name + self[:name] ||= 'request.faraday' + end + + remove_method :instrumenter + # @return [Class] + def instrumenter + self[:instrumenter] ||= ActiveSupport::Notifications + end + end + + # Instruments requests using Active Support. 
+ # + # Measures time spent only for synchronous requests. + # + # @example Using ActiveSupport::Notifications to measure time spent + # for Faraday requests. + # ActiveSupport::Notifications + # .subscribe('request.faraday') do |name, starts, ends, _, env| + # url = env[:url] + # http_method = env[:method].to_s.upcase + # duration = ends - starts + # $stderr.puts '[%s] %s %s (%.3f s)' % + # [url.host, http_method, url.request_uri, duration] + # end + # @param app [#call] + # @param options [nil, Hash] Options hash + # @option options [String] :name ('request.faraday') + # Name of the instrumenter + # @option options [Class] :instrumenter (ActiveSupport::Notifications) + # Active Support instrumenter class. + def initialize(app, options = nil) + super(app) + @name, @instrumenter = Options.from(options) + .values_at(:name, :instrumenter) + end + + # @param env [Faraday::Env] + def call(env) + @instrumenter.instrument(@name, env) do + @app.call(env) + end + end + end + end +end + +Faraday::Request.register_middleware(instrumentation: Faraday::Request::Instrumentation) diff --git a/vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/lib/faraday/request/json.rb b/vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/lib/faraday/request/json.rb new file mode 100644 index 000000000..f12ebe298 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/lib/faraday/request/json.rb @@ -0,0 +1,70 @@ +# frozen_string_literal: true + +require 'json' + +module Faraday + class Request + # Request middleware that encodes the body as JSON. + # + # Processes only requests with matching Content-type or those without a type. + # If a request doesn't have a type but has a body, it sets the Content-type + # to JSON MIME-type. + # + # Doesn't try to encode bodies that already are in string form. + class Json < Middleware + MIME_TYPE = 'application/json' + MIME_TYPE_REGEX = %r{^application/(vnd\..+\+)?json$} + + def on_request(env) + match_content_type(env) do |data| + env[:body] = encode(data) + end + end + + private + + def encode(data) + if options[:encoder].is_a?(Array) && options[:encoder].size >= 2 + options[:encoder][0].public_send(options[:encoder][1], data) + elsif options[:encoder].respond_to?(:dump) + options[:encoder].dump(data) + else + ::JSON.generate(data) + end + end + + def match_content_type(env) + return unless process_request?(env) + + env[:request_headers][CONTENT_TYPE] ||= MIME_TYPE + yield env[:body] unless env[:body].respond_to?(:to_str) + end + + def process_request?(env) + type = request_type(env) + body?(env) && (type.empty? || type.match?(MIME_TYPE_REGEX)) + end + + def body?(env) + body = env[:body] + case body + when true, false + true + when nil + # NOTE: nil can be converted to `"null"`, but this middleware doesn't process `nil` for the compatibility. + false + else + !(body.respond_to?(:to_str) && body.empty?) 
+ end + end + + def request_type(env) + type = env[:request_headers][CONTENT_TYPE].to_s + type = type.split(';', 2).first if type.index(';') + type + end + end + end +end + +Faraday::Request.register_middleware(json: Faraday::Request::Json) diff --git a/vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/lib/faraday/request/url_encoded.rb b/vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/lib/faraday/request/url_encoded.rb new file mode 100644 index 000000000..5ac7dcb35 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/lib/faraday/request/url_encoded.rb @@ -0,0 +1,60 @@ +# frozen_string_literal: true + +module Faraday + class Request + # Middleware for supporting urlencoded requests. + class UrlEncoded < Faraday::Middleware + unless defined?(::Faraday::Request::UrlEncoded::CONTENT_TYPE) + CONTENT_TYPE = 'Content-Type' + end + + class << self + attr_accessor :mime_type + end + self.mime_type = 'application/x-www-form-urlencoded' + + # Encodes as "application/x-www-form-urlencoded" if not already encoded or + # of another type. + # + # @param env [Faraday::Env] + def call(env) + match_content_type(env) do |data| + params = Faraday::Utils::ParamsHash[data] + env.body = params.to_query(env.params_encoder) + end + @app.call env + end + + # @param env [Faraday::Env] + # @yield [request_body] Body of the request + def match_content_type(env) + return unless process_request?(env) + + env.request_headers[CONTENT_TYPE] ||= self.class.mime_type + return if env.body.respond_to?(:to_str) || env.body.respond_to?(:read) + + yield(env.body) + end + + # @param env [Faraday::Env] + # + # @return [Boolean] True if the request has a body and its Content-Type is + # urlencoded. + def process_request?(env) + type = request_type(env) + env.body && (type.empty? || (type == self.class.mime_type)) + end + + # @param env [Faraday::Env] + # + # @return [String] + def request_type(env) + type = env.request_headers[CONTENT_TYPE].to_s + type = type.split(';', 2).first if type.index(';') + type + end + end + end +end + +Faraday::Request.register_middleware(url_encoded: Faraday::Request::UrlEncoded) diff --git a/vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/lib/faraday/response.rb b/vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/lib/faraday/response.rb new file mode 100644 index 000000000..d1fa9320d --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/lib/faraday/response.rb @@ -0,0 +1,91 @@ +# frozen_string_literal: true + +require 'forwardable' + +module Faraday + # Response represents an HTTP response from making an HTTP request. + class Response + extend Forwardable + extend MiddlewareRegistry + + def initialize(env = nil) + @env = Env.from(env) if env + @on_complete_callbacks = [] + end + + attr_reader :env + + def status + finished? ? env.status : nil + end + + def reason_phrase + finished? ? env.reason_phrase : nil + end + + def headers + finished? ? env.response_headers : {} + end + + def_delegator :headers, :[] + + def body + finished? ? env.body : nil + end + + def finished? + !!env + end + + def on_complete(&block) + if finished? + yield(env) + else + @on_complete_callbacks << block + end + self + end + + def finish(env) + raise 'response already finished' if finished? + + @env = env.is_a?(Env) ? env : Env.from(env) + @on_complete_callbacks.each { |callback| callback.call(@env) } + self + end + + def success? + finished? && env.success? 
+ end + + def to_hash + { + status: env.status, body: env.body, + response_headers: env.response_headers, + url: env.url + } + end + + # because @on_complete_callbacks cannot be marshalled + def marshal_dump + finished? ? to_hash : nil + end + + def marshal_load(env) + @env = Env.from(env) + end + + # Expand the env with more properties, without overriding existing ones. + # Useful for applying request params after restoring a marshalled Response. + def apply_request(request_env) + raise "response didn't finish yet" unless finished? + + @env = Env.from(request_env).update(@env) + self + end + end +end + +require 'faraday/response/json' +require 'faraday/response/logger' +require 'faraday/response/raise_error' diff --git a/vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/lib/faraday/response/json.rb b/vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/lib/faraday/response/json.rb new file mode 100644 index 000000000..71a57edb7 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/lib/faraday/response/json.rb @@ -0,0 +1,74 @@ +# frozen_string_literal: true + +require 'json' + +module Faraday + class Response + # Parse response bodies as JSON. + class Json < Middleware + def initialize(app = nil, parser_options: nil, content_type: /\bjson$/, preserve_raw: false) + super(app) + @parser_options = parser_options + @content_types = Array(content_type) + @preserve_raw = preserve_raw + + process_parser_options + end + + def on_complete(env) + process_response(env) if parse_response?(env) + end + + private + + def process_response(env) + env[:raw_body] = env[:body] if @preserve_raw + env[:body] = parse(env[:body]) + rescue StandardError, SyntaxError => e + raise Faraday::ParsingError.new(e, env[:response]) + end + + def parse(body) + return if body.strip.empty? + + decoder, method_name = @decoder_options + + decoder.public_send(method_name, body, @parser_options || {}) + end + + def parse_response?(env) + process_response_type?(env) && + env[:body].respond_to?(:to_str) + end + + def process_response_type?(env) + type = response_type(env) + @content_types.empty? || @content_types.any? do |pattern| + pattern.is_a?(Regexp) ? type.match?(pattern) : type == pattern + end + end + + def response_type(env) + type = env[:response_headers][CONTENT_TYPE].to_s + type = type.split(';', 2).first if type.index(';') + type + end + + def process_parser_options + @decoder_options = @parser_options&.delete(:decoder) + + @decoder_options = + if @decoder_options.is_a?(Array) && @decoder_options.size >= 2 + @decoder_options.slice(0, 2) + elsif @decoder_options&.respond_to?(:load) # rubocop:disable Lint/RedundantSafeNavigation + # In some versions of Rails, `nil` responds to `load` - hence the safe navigation check above + [@decoder_options, :load] + else + [::JSON, :parse] + end + end + end + end +end + +Faraday::Response.register_middleware(json: Faraday::Response::Json) diff --git a/vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/lib/faraday/response/logger.rb b/vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/lib/faraday/response/logger.rb new file mode 100644 index 000000000..94dbc92ac --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/lib/faraday/response/logger.rb @@ -0,0 +1,39 @@ +# frozen_string_literal: true + +require 'forwardable' +require 'logger' +require 'faraday/logging/formatter' + +module Faraday + class Response + # Logger is a middleware that logs internal events in the HTTP request + # lifecycle to a given Logger object. By default, this logs to STDOUT. 
See + # Faraday::Logging::Formatter to see specifically what is logged. + class Logger < Middleware + DEFAULT_OPTIONS = { formatter: Logging::Formatter }.merge(Logging::Formatter::DEFAULT_OPTIONS).freeze + + def initialize(app, logger = nil, options = {}) + super(app, options) + logger ||= ::Logger.new($stdout) + formatter_class = @options.delete(:formatter) + @formatter = formatter_class.new(logger: logger, options: @options) + yield @formatter if block_given? + end + + def call(env) + @formatter.request(env) + super + end + + def on_complete(env) + @formatter.response(env) + end + + def on_error(exc) + @formatter.exception(exc) if @formatter.respond_to?(:exception) + end + end + end +end + +Faraday::Response.register_middleware(logger: Faraday::Response::Logger) diff --git a/vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/lib/faraday/response/raise_error.rb b/vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/lib/faraday/response/raise_error.rb new file mode 100644 index 000000000..0b543219b --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/lib/faraday/response/raise_error.rb @@ -0,0 +1,83 @@ +# frozen_string_literal: true + +module Faraday + class Response + # RaiseError is a Faraday middleware that raises exceptions on common HTTP + # client or server error responses. + class RaiseError < Middleware + # rubocop:disable Naming/ConstantName + ClientErrorStatuses = (400...500) + ServerErrorStatuses = (500...600) + ClientErrorStatusesWithCustomExceptions = { + 400 => Faraday::BadRequestError, + 401 => Faraday::UnauthorizedError, + 403 => Faraday::ForbiddenError, + 404 => Faraday::ResourceNotFound, + 408 => Faraday::RequestTimeoutError, + 409 => Faraday::ConflictError, + 422 => Faraday::UnprocessableEntityError, + 429 => Faraday::TooManyRequestsError + }.freeze + # rubocop:enable Naming/ConstantName + + DEFAULT_OPTIONS = { include_request: true, allowed_statuses: [] }.freeze + + def on_complete(env) + return if Array(options[:allowed_statuses]).include?(env[:status]) + + case env[:status] + when *ClientErrorStatusesWithCustomExceptions.keys + raise ClientErrorStatusesWithCustomExceptions[env[:status]], response_values(env) + when 407 + # mimic the behavior that we get with proxy requests with HTTPS + msg = %(407 "Proxy Authentication Required") + raise Faraday::ProxyAuthError.new(msg, response_values(env)) + when ClientErrorStatuses + raise Faraday::ClientError, response_values(env) + when ServerErrorStatuses + raise Faraday::ServerError, response_values(env) + when nil + raise Faraday::NilStatusError, response_values(env) + end + end + + # Returns a hash of response data with the following keys: + # - status + # - headers + # - body + # - request + # + # The `request` key is omitted when the middleware is explicitly + # configured with the option `include_request: false`. + def response_values(env) + response = { + status: env.status, + headers: env.response_headers, + body: env.body + } + + # Include the request data by default. If the middleware was explicitly + # configured to _not_ include request data, then omit it. 
+ return response unless options[:include_request] + + response.merge( + request: { + method: env.method, + url: env.url, + url_path: env.url.path, + params: query_params(env), + headers: env.request_headers, + body: env.request_body + } + ) + end + + def query_params(env) + env.request.params_encoder ||= Faraday::Utils.default_params_encoder + env.params_encoder.decode(env.url.query) + end + end + end +end + +Faraday::Response.register_middleware(raise_error: Faraday::Response::RaiseError) diff --git a/vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/lib/faraday/utils.rb b/vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/lib/faraday/utils.rb new file mode 100644 index 000000000..809b3a88e --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/lib/faraday/utils.rb @@ -0,0 +1,121 @@ +# frozen_string_literal: true + +require 'uri' +require 'faraday/utils/headers' +require 'faraday/utils/params_hash' + +module Faraday + # Utils contains various static helper methods. + module Utils + module_function + + def build_query(params) + FlatParamsEncoder.encode(params) + end + + def build_nested_query(params) + NestedParamsEncoder.encode(params) + end + + def default_space_encoding + @default_space_encoding ||= '+' + end + + class << self + attr_writer :default_space_encoding + end + + ESCAPE_RE = /[^a-zA-Z0-9 .~_-]/ + + def escape(str) + str.to_s.gsub(ESCAPE_RE) do |match| + "%#{match.unpack('H2' * match.bytesize).join('%').upcase}" + end.gsub(' ', default_space_encoding) + end + + def unescape(str) + CGI.unescape str.to_s + end + + DEFAULT_SEP = /[&;] */n + + # Adapted from Rack + def parse_query(query) + FlatParamsEncoder.decode(query) + end + + def parse_nested_query(query) + NestedParamsEncoder.decode(query) + end + + def default_params_encoder + @default_params_encoder ||= NestedParamsEncoder + end + + def basic_header_from(login, pass) + value = ["#{login}:#{pass}"].pack('m') # Base64 encoding + value.delete!("\n") + "Basic #{value}" + end + + class << self + attr_writer :default_params_encoder + end + + # Normalize URI() behavior across Ruby versions + # + # url - A String or URI. + # + # Returns a parsed URI. + def URI(url) # rubocop:disable Naming/MethodName + if url.respond_to?(:host) + url + elsif url.respond_to?(:to_str) + default_uri_parser.call(url) + else + raise ArgumentError, 'bad argument (expected URI object or URI string)' + end + end + + def default_uri_parser + @default_uri_parser ||= Kernel.method(:URI) + end + + def default_uri_parser=(parser) + @default_uri_parser = if parser.respond_to?(:call) || parser.nil? + parser + else + parser.method(:parse) + end + end + + # Receives a String or URI and returns just + # the path with the query string sorted. + def normalize_path(url) + url = URI(url) + (url.path.start_with?('/') ? url.path : "/#{url.path}") + + (url.query ? 
"?#{sort_query_params(url.query)}" : '') + end + + # Recursive hash update + def deep_merge!(target, hash) + hash.each do |key, value| + target[key] = if value.is_a?(Hash) && (target[key].is_a?(Hash) || target[key].is_a?(Options)) + deep_merge(target[key], value) + else + value + end + end + target + end + + # Recursive hash merge + def deep_merge(source, hash) + deep_merge!(source.dup, hash) + end + + def sort_query_params(query) + query.split('&').sort.join('&') + end + end +end diff --git a/vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/lib/faraday/utils/headers.rb b/vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/lib/faraday/utils/headers.rb new file mode 100644 index 000000000..27b06e964 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/lib/faraday/utils/headers.rb @@ -0,0 +1,150 @@ +# frozen_string_literal: true + +module Faraday + module Utils + # A case-insensitive Hash that preserves the original case of a header + # when set. + # + # Adapted from Rack::Utils::HeaderHash + class Headers < ::Hash + def self.from(value) + new(value) + end + + def self.allocate + new_self = super + new_self.initialize_names + new_self + end + + def initialize(hash = nil) + super() + @names = {} + update(hash || {}) + end + + def initialize_names + @names = {} + end + + # on dup/clone, we need to duplicate @names hash + def initialize_copy(other) + super + @names = other.names.dup + end + + # need to synchronize concurrent writes to the shared KeyMap + keymap_mutex = Mutex.new + + # symbol -> string mapper + cache + KeyMap = Hash.new do |map, key| + value = if key.respond_to?(:to_str) + key + else + key.to_s.split('_') # user_agent: %w(user agent) + .each(&:capitalize!) # => %w(User Agent) + .join('-') # => "User-Agent" + end + keymap_mutex.synchronize { map[key] = value } + end + KeyMap[:etag] = 'ETag' + + def [](key) + key = KeyMap[key] + super(key) || super(@names[key.downcase]) + end + + def []=(key, val) + key = KeyMap[key] + key = (@names[key.downcase] ||= key) + # join multiple values with a comma + val = val.to_ary.join(', ') if val.respond_to?(:to_ary) + super(key, val) + end + + def fetch(key, ...) + key = KeyMap[key] + key = @names.fetch(key.downcase, key) + super(key, ...) + end + + def delete(key) + key = KeyMap[key] + key = @names[key.downcase] + return unless key + + @names.delete key.downcase + super(key) + end + + def dig(key, *rest) + key = KeyMap[key] + key = @names.fetch(key.downcase, key) + super(key, *rest) + end + + def include?(key) + @names.include? key.downcase + end + + alias has_key? include? + alias member? include? + alias key? include? + + def merge!(other) + other.each { |k, v| self[k] = v } + self + end + + alias update merge! + + def merge(other) + hash = dup + hash.merge! other + end + + def replace(other) + clear + @names.clear + update other + self + end + + def to_hash + {}.update(self) + end + + def parse(header_string) + return unless header_string && !header_string.empty? + + headers = header_string.split("\r\n") + + # Find the last set of response headers. + start_index = headers.rindex { |x| x.start_with?('HTTP/') } || 0 + last_response = headers.slice(start_index, headers.size) + + last_response + .tap { |a| a.shift if a.first.start_with?('HTTP/') } + .map { |h| h.split(/:\s*/, 2) } # split key and value + .reject { |p| p[0].nil? } # ignore blank lines + .each { |key, value| add_parsed(key, value) } + end + + protected + + attr_reader :names + + private + + # Join multiple values with a comma. 
+ def add_parsed(key, value) + if key?(key) + self[key] = self[key].to_s + self[key] << ', ' << value + else + self[key] = value + end + end + end + end +end diff --git a/vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/lib/faraday/utils/params_hash.rb b/vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/lib/faraday/utils/params_hash.rb new file mode 100644 index 000000000..0e16d9350 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/lib/faraday/utils/params_hash.rb @@ -0,0 +1,61 @@ +# frozen_string_literal: true + +module Faraday + module Utils + # A hash with stringified keys. + class ParamsHash < Hash + def [](key) + super(convert_key(key)) + end + + def []=(key, value) + super(convert_key(key), value) + end + + def delete(key) + super(convert_key(key)) + end + + def include?(key) + super(convert_key(key)) + end + + alias has_key? include? + alias member? include? + alias key? include? + + def update(params) + params.each do |key, value| + self[key] = value + end + self + end + alias merge! update + + def merge(params) + dup.update(params) + end + + def replace(other) + clear + update(other) + end + + def merge_query(query, encoder = nil) + return self unless query && !query.empty? + + update((encoder || Utils.default_params_encoder).decode(query)) + end + + def to_query(encoder = nil) + (encoder || Utils.default_params_encoder).encode(self) + end + + private + + def convert_key(key) + key.to_s + end + end + end +end diff --git a/vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/lib/faraday/version.rb b/vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/lib/faraday/version.rb new file mode 100644 index 000000000..10205e427 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/lib/faraday/version.rb @@ -0,0 +1,5 @@ +# frozen_string_literal: true + +module Faraday + VERSION = '2.13.4' +end diff --git a/vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/spec/external_adapters/faraday_specs_setup.rb b/vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/spec/external_adapters/faraday_specs_setup.rb new file mode 100644 index 000000000..ac7f7b643 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/spec/external_adapters/faraday_specs_setup.rb @@ -0,0 +1,14 @@ +# frozen_string_literal: true + +require 'webmock/rspec' +WebMock.disable_net_connect!(allow_localhost: true) + +require_relative '../support/helper_methods' +require_relative '../support/disabling_stub' +require_relative '../support/streaming_response_checker' +require_relative '../support/shared_examples/adapter' +require_relative '../support/shared_examples/request_method' + +RSpec.configure do |config| + config.include Faraday::HelperMethods +end diff --git a/vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/spec/faraday/adapter/test_spec.rb b/vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/spec/faraday/adapter/test_spec.rb new file mode 100644 index 000000000..117bb7899 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/spec/faraday/adapter/test_spec.rb @@ -0,0 +1,442 @@ +# frozen_string_literal: true + +RSpec.describe Faraday::Adapter::Test do + let(:stubs) do + described_class::Stubs.new do |stub| + stub.get('http://domain.test/hello') do + [200, { 'Content-Type' => 'text/html' }, 'domain: hello'] + end + + stub.get('http://wrong.test/hello') do + [200, { 'Content-Type' => 'text/html' }, 'wrong: hello'] + end + + stub.get('http://wrong.test/bait') do + [404, { 'Content-Type' => 'text/html' }] + end + + stub.get('/hello') do + [200, { 'Content-Type' => 'text/html' }, 'hello'] + end + + stub.get('/method-echo') do |env| + 
[200, { 'Content-Type' => 'text/html' }, env[:method].to_s] + end + + stub.get(%r{\A/resources/\d+(?:\?|\z)}) do + [200, { 'Content-Type' => 'text/html' }, 'show'] + end + + stub.get(%r{\A/resources/(specified)\z}) do |_env, meta| + [200, { 'Content-Type' => 'text/html' }, "show #{meta[:match_data][1]}"] + end + end + end + + let(:connection) do + Faraday.new do |builder| + builder.adapter :test, stubs + end + end + + let(:response) { connection.get('/hello') } + + context 'with simple path sets status' do + subject { response.status } + + it { is_expected.to eq 200 } + end + + context 'with simple path sets headers' do + subject { response.headers['Content-Type'] } + + it { is_expected.to eq 'text/html' } + end + + context 'with simple path sets body' do + subject { response.body } + + it { is_expected.to eq 'hello' } + end + + context 'with host points to the right stub' do + subject { connection.get('http://domain.test/hello').body } + + it { is_expected.to eq 'domain: hello' } + end + + describe 'can be called several times' do + subject { connection.get('/hello').body } + + it { is_expected.to eq 'hello' } + end + + describe 'can handle regular expression path' do + subject { connection.get('/resources/1').body } + + it { is_expected.to eq 'show' } + end + + describe 'can handle single parameter block' do + subject { connection.get('/method-echo').body } + + it { is_expected.to eq 'get' } + end + + describe 'can handle regular expression path with captured result' do + subject { connection.get('/resources/specified').body } + + it { is_expected.to eq 'show specified' } + end + + context 'with get params' do + subject { connection.get('/param?a=1').body } + + before do + stubs.get('/param?a=1') { [200, {}, 'a'] } + end + + it { is_expected.to eq 'a' } + end + + describe 'ignoring unspecified get params' do + before do + stubs.get('/optional?a=1') { [200, {}, 'a'] } + end + + context 'with multiple params' do + subject { connection.get('/optional?a=1&b=1').body } + + it { is_expected.to eq 'a' } + end + + context 'with single param' do + subject { connection.get('/optional?a=1').body } + + it { is_expected.to eq 'a' } + end + + context 'without params' do + subject(:request) { connection.get('/optional') } + + it do + expect { request }.to raise_error( + Faraday::Adapter::Test::Stubs::NotFound + ) + end + end + end + + context 'with http headers' do + before do + stubs.get('/yo', 'X-HELLO' => 'hello') { [200, {}, 'a'] } + stubs.get('/yo') { [200, {}, 'b'] } + end + + context 'with header' do + subject do + connection.get('/yo') { |env| env.headers['X-HELLO'] = 'hello' }.body + end + + it { is_expected.to eq 'a' } + end + + context 'without header' do + subject do + connection.get('/yo').body + end + + it { is_expected.to eq 'b' } + end + end + + describe 'different outcomes for the same request' do + def make_request + connection.get('/foo') + end + + subject(:request) { make_request.body } + + before do + stubs.get('/foo') { [200, { 'Content-Type' => 'text/html' }, 'hello'] } + stubs.get('/foo') { [200, { 'Content-Type' => 'text/html' }, 'world'] } + end + + context 'the first request' do + it { is_expected.to eq 'hello' } + end + + context 'the second request' do + before do + make_request + end + + it { is_expected.to eq 'world' } + end + end + + describe 'yielding env to stubs' do + subject { connection.get('http://foo.com/foo?a=1').body } + + before do + stubs.get '/foo' do |env| + expect(env[:url].path).to eq '/foo' + expect(env[:url].host).to eq 'foo.com' + 
expect(env[:params]['a']).to eq '1' + expect(env[:request_headers]['Accept']).to eq 'text/plain' + [200, {}, 'a'] + end + + connection.headers['Accept'] = 'text/plain' + end + + it { is_expected.to eq 'a' } + end + + describe 'params parsing' do + subject { connection.get('http://foo.com/foo?a[b]=1').body } + + context 'with default encoder' do + before do + stubs.get '/foo' do |env| + expect(env[:params]['a']['b']).to eq '1' + [200, {}, 'a'] + end + end + + it { is_expected.to eq 'a' } + end + + context 'with nested encoder' do + before do + stubs.get '/foo' do |env| + expect(env[:params]['a']['b']).to eq '1' + [200, {}, 'a'] + end + + connection.options.params_encoder = Faraday::NestedParamsEncoder + end + + it { is_expected.to eq 'a' } + end + + context 'with flat encoder' do + before do + stubs.get '/foo' do |env| + expect(env[:params]['a[b]']).to eq '1' + [200, {}, 'a'] + end + + connection.options.params_encoder = Faraday::FlatParamsEncoder + end + + it { is_expected.to eq 'a' } + end + end + + describe 'raising an error if no stub was found' do + describe 'for request' do + subject(:request) { connection.get('/invalid') { [200, {}, []] } } + + it { expect { request }.to raise_error described_class::Stubs::NotFound } + end + + describe 'for specified host' do + subject(:request) { connection.get('http://domain.test/bait') } + + it { expect { request }.to raise_error described_class::Stubs::NotFound } + end + + describe 'for request without specified header' do + subject(:request) { connection.get('/yo') } + + before do + stubs.get('/yo', 'X-HELLO' => 'hello') { [200, {}, 'a'] } + end + + it { expect { request }.to raise_error described_class::Stubs::NotFound } + end + end + + describe 'for request with non default params encoder' do + let(:connection) do + Faraday.new(request: { params_encoder: Faraday::FlatParamsEncoder }) do |builder| + builder.adapter :test, stubs + end + end + let(:stubs) do + described_class::Stubs.new do |stubs| + stubs.get('/path?a=x&a=y&a=z') { [200, {}, 'a'] } + end + end + + context 'when all flat param values are correctly set' do + subject(:request) { connection.get('/path?a=x&a=y&a=z') } + + it { expect(request.status).to eq 200 } + end + + shared_examples 'raise NotFound when params do not satisfy the flat param values' do |params| + subject(:request) { connection.get('/path', params) } + + context "with #{params.inspect}" do + it { expect { request }.to raise_error described_class::Stubs::NotFound } + end + end + + it_behaves_like 'raise NotFound when params do not satisfy the flat param values', { a: %w[x] } + it_behaves_like 'raise NotFound when params do not satisfy the flat param values', { a: %w[x y] } + it_behaves_like 'raise NotFound when params do not satisfy the flat param values', { a: %w[x z y] } # NOTE: The order of the value is also compared. 
+ it_behaves_like 'raise NotFound when params do not satisfy the flat param values', { b: %w[x y z] } + end + + describe 'strict_mode' do + let(:stubs) do + described_class::Stubs.new(strict_mode: true) do |stubs| + stubs.get('/strict?a=12&b=xy', 'Authorization' => 'Bearer m_ck', 'X-C' => 'hello') { [200, {}, 'a'] } + stubs.get('/with_user_agent?a=12&b=xy', authorization: 'Bearer m_ck', 'User-Agent' => 'My Agent') { [200, {}, 'a'] } + end + end + + context 'when params and headers are exactly set' do + subject(:request) { connection.get('/strict', { a: '12', b: 'xy' }, { authorization: 'Bearer m_ck', x_c: 'hello' }) } + + it { expect(request.status).to eq 200 } + end + + context 'when params and headers are exactly set with a custom user agent' do + subject(:request) { connection.get('/with_user_agent', { a: '12', b: 'xy' }, { authorization: 'Bearer m_ck', 'User-Agent' => 'My Agent' }) } + + it { expect(request.status).to eq 200 } + end + + shared_examples 'raise NotFound when params do not satisfy the strict check' do |params| + subject(:request) { connection.get('/strict', params, { 'Authorization' => 'Bearer m_ck', 'X-C' => 'hello' }) } + + context "with #{params.inspect}" do + it { expect { request }.to raise_error described_class::Stubs::NotFound } + end + end + + it_behaves_like 'raise NotFound when params do not satisfy the strict check', { a: '12' } + it_behaves_like 'raise NotFound when params do not satisfy the strict check', { b: 'xy' } + it_behaves_like 'raise NotFound when params do not satisfy the strict check', { a: '123', b: 'xy' } + it_behaves_like 'raise NotFound when params do not satisfy the strict check', { a: '12', b: 'xyz' } + it_behaves_like 'raise NotFound when params do not satisfy the strict check', { a: '12', b: 'xy', c: 'hello' } + it_behaves_like 'raise NotFound when params do not satisfy the strict check', { additional: 'special', a: '12', b: 'xy', c: 'hello' } + + shared_examples 'raise NotFound when headers do not satisfy the strict check' do |path, headers| + subject(:request) { connection.get(path, { a: 12, b: 'xy' }, headers) } + + context "with #{headers.inspect}" do + it { expect { request }.to raise_error described_class::Stubs::NotFound } + end + end + + it_behaves_like 'raise NotFound when headers do not satisfy the strict check', '/strict', { authorization: 'Bearer m_ck' } + it_behaves_like 'raise NotFound when headers do not satisfy the strict check', '/strict', { 'X-C' => 'hello' } + it_behaves_like 'raise NotFound when headers do not satisfy the strict check', '/strict', { authorization: 'Bearer m_ck', 'x-c': 'Hi' } + it_behaves_like 'raise NotFound when headers do not satisfy the strict check', '/strict', { authorization: 'Basic m_ck', 'x-c': 'hello' } + it_behaves_like 'raise NotFound when headers do not satisfy the strict check', '/strict', { authorization: 'Bearer m_ck', 'x-c': 'hello', x_special: 'special' } + it_behaves_like 'raise NotFound when headers do not satisfy the strict check', '/with_user_agent', { authorization: 'Bearer m_ck' } + it_behaves_like 'raise NotFound when headers do not satisfy the strict check', '/with_user_agent', { authorization: 'Bearer m_ck', user_agent: 'Unknown' } + it_behaves_like 'raise NotFound when headers do not satisfy the strict check', '/with_user_agent', { authorization: 'Bearer m_ck', user_agent: 'My Agent', x_special: 'special' } + + context 'when strict_mode is disabled' do + before do + stubs.strict_mode = false + end + + shared_examples 'does not raise NotFound even when params do not satisfy the 
strict check' do |params| + subject(:request) { connection.get('/strict', params, { 'Authorization' => 'Bearer m_ck', 'X-C' => 'hello' }) } + + context "with #{params.inspect}" do + it { expect(request.status).to eq 200 } + end + end + + it_behaves_like 'does not raise NotFound even when params do not satisfy the strict check', { a: '12', b: 'xy' } + it_behaves_like 'does not raise NotFound even when params do not satisfy the strict check', { a: '12', b: 'xy', c: 'hello' } + it_behaves_like 'does not raise NotFound even when params do not satisfy the strict check', { additional: 'special', a: '12', b: 'xy', c: 'hello' } + + shared_examples 'does not raise NotFound even when headers do not satisfy the strict check' do |path, headers| + subject(:request) { connection.get(path, { a: 12, b: 'xy' }, headers) } + + context "with #{headers.inspect}" do + it { expect(request.status).to eq 200 } + end + end + + it_behaves_like 'does not raise NotFound even when headers do not satisfy the strict check', '/strict', { authorization: 'Bearer m_ck', 'x-c': 'hello' } + it_behaves_like 'does not raise NotFound even when headers do not satisfy the strict check', '/strict', { authorization: 'Bearer m_ck', 'x-c': 'hello', x_special: 'special' } + it_behaves_like 'does not raise NotFound even when headers do not satisfy the strict check', '/strict', { authorization: 'Bearer m_ck', 'x-c': 'hello', user_agent: 'Special Agent' } + it_behaves_like 'does not raise NotFound even when headers do not satisfy the strict check', '/with_user_agent', { authorization: 'Bearer m_ck', user_agent: 'My Agent' } + it_behaves_like 'does not raise NotFound even when headers do not satisfy the strict check', '/with_user_agent', { authorization: 'Bearer m_ck', user_agent: 'My Agent', x_special: 'special' } + end + + describe 'body_match?' 
do + let(:stubs) do + described_class::Stubs.new do |stubs| + stubs.post('/no_check') { [200, {}, 'ok'] } + stubs.post('/with_string', 'abc') { [200, {}, 'ok'] } + stubs.post( + '/with_proc', + ->(request_body) { JSON.parse(request_body, symbolize_names: true) == { x: '!', a: [{ m: [{ a: true }], n: 123 }] } }, + { content_type: 'application/json' } + ) do + [200, {}, 'ok'] + end + end + end + + context 'when trying without any args for body' do + subject(:without_body) { connection.post('/no_check') } + + it { expect(without_body.status).to eq 200 } + end + + context 'when trying with string body stubs' do + subject(:with_string) { connection.post('/with_string', 'abc') } + + it { expect(with_string.status).to eq 200 } + end + + context 'when trying with proc body stubs' do + subject(:with_proc) do + connection.post('/with_proc', JSON.dump(a: [{ n: 123, m: [{ a: true }] }], x: '!'), { 'Content-Type' => 'application/json' }) + end + + it { expect(with_proc.status).to eq 200 } + end + end + end + + describe 'request timeout' do + subject(:request) do + connection.get('/sleep') do |req| + req.options.timeout = timeout + end + end + + before do + stubs.get('/sleep') do + sleep(0.01) + [200, {}, ''] + end + end + + context 'when request is within timeout' do + let(:timeout) { 1 } + + it { expect(request.status).to eq 200 } + end + + context 'when request is too slow' do + let(:timeout) { 0.001 } + + it 'raises an exception' do + expect { request }.to raise_error(Faraday::TimeoutError) + end + end + end +end diff --git a/vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/spec/faraday/adapter_registry_spec.rb b/vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/spec/faraday/adapter_registry_spec.rb new file mode 100644 index 000000000..222e65ef5 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/spec/faraday/adapter_registry_spec.rb @@ -0,0 +1,28 @@ +# frozen_string_literal: true + +RSpec.describe Faraday::AdapterRegistry do + describe '#initialize' do + subject(:registry) { described_class.new } + + it { expect { registry.get(:FinFangFoom) }.to raise_error(NameError) } + it { expect { registry.get('FinFangFoom') }.to raise_error(NameError) } + + it 'looks up class by string name' do + expect(registry.get('Faraday::Connection')).to eq(Faraday::Connection) + end + + it 'looks up class by symbol name' do + expect(registry.get(:Faraday)).to eq(Faraday) + end + + it 'caches lookups with implicit name' do + registry.set :symbol + expect(registry.get('symbol')).to eq(:symbol) + end + + it 'caches lookups with explicit name' do + registry.set 'string', :name + expect(registry.get(:name)).to eq('string') + end + end +end diff --git a/vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/spec/faraday/adapter_spec.rb b/vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/spec/faraday/adapter_spec.rb new file mode 100644 index 000000000..22ef1d149 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/spec/faraday/adapter_spec.rb @@ -0,0 +1,55 @@ +# frozen_string_literal: true + +RSpec.describe Faraday::Adapter do + let(:adapter) { Faraday::Adapter.new } + let(:request) { {} } + + context '#request_timeout' do + it 'gets :read timeout' do + expect(timeout(:read)).to eq(nil) + + request[:timeout] = 5 + request[:write_timeout] = 1 + + expect(timeout(:read)).to eq(5) + + request[:read_timeout] = 2 + + expect(timeout(:read)).to eq(2) + end + + it 'gets :open timeout' do + expect(timeout(:open)).to eq(nil) + + request[:timeout] = 5 + request[:write_timeout] = 1 + + expect(timeout(:open)).to eq(5) + + 
request[:open_timeout] = 2 + + expect(timeout(:open)).to eq(2) + end + + it 'gets :write timeout' do + expect(timeout(:write)).to eq(nil) + + request[:timeout] = 5 + request[:read_timeout] = 1 + + expect(timeout(:write)).to eq(5) + + request[:write_timeout] = 2 + + expect(timeout(:write)).to eq(2) + end + + it 'attempts unknown timeout type' do + expect { timeout(:unknown) }.to raise_error(ArgumentError) + end + + def timeout(type) + adapter.send(:request_timeout, type, request) + end + end +end diff --git a/vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/spec/faraday/connection_spec.rb b/vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/spec/faraday/connection_spec.rb new file mode 100644 index 000000000..7fa726a4c --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/spec/faraday/connection_spec.rb @@ -0,0 +1,808 @@ +# frozen_string_literal: true + +class CustomEncoder + def encode(params) + params.map { |k, v| "#{k.upcase}-#{v.to_s.upcase}" }.join(',') + end + + def decode(params) + params.split(',').to_h { |pair| pair.split('-') } + end +end + +shared_examples 'initializer with url' do + context 'with simple url' do + let(:address) { 'http://httpbingo.org' } + + it { expect(subject.host).to eq('httpbingo.org') } + it { expect(subject.port).to eq(80) } + it { expect(subject.scheme).to eq('http') } + it { expect(subject.path_prefix).to eq('/') } + it { expect(subject.params).to eq({}) } + end + + context 'with complex url' do + let(:address) { 'http://httpbingo.org:815/fish?a=1' } + + it { expect(subject.port).to eq(815) } + it { expect(subject.path_prefix).to eq('/fish') } + it { expect(subject.params).to eq('a' => '1') } + end + + context 'with IPv6 address' do + let(:address) { 'http://[::1]:85/' } + + it { expect(subject.host).to eq('[::1]') } + it { expect(subject.port).to eq(85) } + end +end + +shared_examples 'default connection options' do + after { Faraday.default_connection_options = nil } + + it 'works with implicit url' do + conn = Faraday.new 'http://httpbingo.org/foo' + expect(conn.options.timeout).to eq(10) + end + + it 'works with option url' do + conn = Faraday.new url: 'http://httpbingo.org/foo' + expect(conn.options.timeout).to eq(10) + end + + it 'works with instance connection options' do + conn = Faraday.new 'http://httpbingo.org/foo', request: { open_timeout: 1 } + expect(conn.options.timeout).to eq(10) + expect(conn.options.open_timeout).to eq(1) + end + + it 'default connection options persist with an instance overriding' do + conn = Faraday.new 'http://nigiri.com/bar' + conn.options.timeout = 1 + expect(Faraday.default_connection_options.request.timeout).to eq(10) + + other = Faraday.new url: 'https://httpbingo.org/foo' + other.options.timeout = 1 + + expect(Faraday.default_connection_options.request.timeout).to eq(10) + end + + it 'default connection uses default connection options' do + expect(Faraday.default_connection.options.timeout).to eq(10) + end +end + +RSpec.describe Faraday::Connection do + let(:conn) { Faraday::Connection.new(url, options) } + let(:url) { nil } + let(:options) { nil } + + describe '.new' do + subject { conn } + + context 'with implicit url param' do + # Faraday::Connection.new('http://httpbingo.org') + let(:url) { address } + + it_behaves_like 'initializer with url' + end + + context 'with explicit url param' do + # Faraday::Connection.new(url: 'http://httpbingo.org') + let(:url) { { url: address } } + + it_behaves_like 'initializer with url' + end + + context 'with custom builder' do + let(:custom_builder) { 
Faraday::RackBuilder.new } + let(:options) { { builder: custom_builder } } + + it { expect(subject.builder).to eq(custom_builder) } + end + + context 'with custom params' do + let(:options) { { params: { a: 1 } } } + + it { expect(subject.params).to eq('a' => 1) } + end + + context 'with custom params and params in url' do + let(:url) { 'http://httpbingo.org/fish?a=1&b=2' } + let(:options) { { params: { a: 3 } } } + it { expect(subject.params).to eq('a' => 3, 'b' => '2') } + end + + context 'with basic_auth in url' do + let(:url) { 'http://Aladdin:open%20sesame@httpbingo.org/fish' } + + it { expect(subject.headers['Authorization']).to eq('Basic QWxhZGRpbjpvcGVuIHNlc2FtZQ==') } + end + + context 'with custom headers' do + let(:options) { { headers: { user_agent: 'Faraday' } } } + + it { expect(subject.headers['User-agent']).to eq('Faraday') } + end + + context 'with ssl false' do + let(:options) { { ssl: { verify: false } } } + + it { expect(subject.ssl.verify?).to be_falsey } + end + + context 'with verify_hostname false' do + let(:options) { { ssl: { verify_hostname: false } } } + + it { expect(subject.ssl.verify_hostname?).to be_falsey } + end + + context 'with empty block' do + let(:conn) { Faraday::Connection.new {} } + + it { expect(conn.builder.handlers.size).to eq(0) } + end + + context 'with block' do + let(:conn) do + Faraday::Connection.new(params: { 'a' => '1' }) do |faraday| + faraday.adapter :test + faraday.url_prefix = 'http://httpbingo.org/omnom' + end + end + + it { expect(conn.builder.handlers.size).to eq(0) } + it { expect(conn.path_prefix).to eq('/omnom') } + end + end + + describe '#close' do + before { Faraday.default_adapter = :test } + after { Faraday.default_adapter = nil } + + it 'can close underlying app' do + expect(conn.app).to receive(:close) + conn.close + end + end + + describe '#build_exclusive_url' do + context 'with relative path' do + subject { conn.build_exclusive_url('sake.html') } + + it 'uses connection host as default host' do + conn.host = 'httpbingo.org' + expect(subject.host).to eq('httpbingo.org') + expect(subject.scheme).to eq('http') + end + + it do + conn.path_prefix = '/fish' + expect(subject.path).to eq('/fish/sake.html') + end + + it do + conn.path_prefix = '/' + expect(subject.path).to eq('/sake.html') + end + + it do + conn.path_prefix = 'fish' + expect(subject.path).to eq('/fish/sake.html') + end + + it do + conn.path_prefix = '/fish/' + expect(subject.path).to eq('/fish/sake.html') + end + end + + context 'with absolute path' do + subject { conn.build_exclusive_url('/sake.html') } + + after { expect(subject.path).to eq('/sake.html') } + + it { conn.path_prefix = '/fish' } + it { conn.path_prefix = '/' } + it { conn.path_prefix = 'fish' } + it { conn.path_prefix = '/fish/' } + end + + context 'with complete url' do + subject { conn.build_exclusive_url('http://httpbingo.org/sake.html?a=1') } + + it { expect(subject.scheme).to eq('http') } + it { expect(subject.host).to eq('httpbingo.org') } + it { expect(subject.port).to eq(80) } + it { expect(subject.path).to eq('/sake.html') } + it { expect(subject.query).to eq('a=1') } + end + + it 'overrides connection port for absolute url' do + conn.port = 23 + uri = conn.build_exclusive_url('http://httpbingo.org') + expect(uri.port).to eq(80) + end + + it 'does not add ending slash given nil url' do + conn.url_prefix = 'http://httpbingo.org/nigiri' + uri = conn.build_exclusive_url + expect(uri.path).to eq('/nigiri') + end + + it 'does not add ending slash given empty url' do + conn.url_prefix = 
'http://httpbingo.org/nigiri' + uri = conn.build_exclusive_url('') + expect(uri.path).to eq('/nigiri') + end + + it 'does not use connection params' do + conn.url_prefix = 'http://httpbingo.org/nigiri' + conn.params = { a: 1 } + expect(conn.build_exclusive_url.to_s).to eq('http://httpbingo.org/nigiri') + end + + it 'allows to provide params argument' do + conn.url_prefix = 'http://httpbingo.org/nigiri' + conn.params = { a: 1 } + params = Faraday::Utils::ParamsHash.new + params[:a] = 2 + uri = conn.build_exclusive_url(nil, params) + expect(uri.to_s).to eq('http://httpbingo.org/nigiri?a=2') + end + + it 'handles uri instances' do + uri = conn.build_exclusive_url(URI('/sake.html')) + expect(uri.path).to eq('/sake.html') + end + + it 'always returns new URI instance' do + conn.url_prefix = 'http://httpbingo.org' + uri1 = conn.build_exclusive_url(nil) + uri2 = conn.build_exclusive_url(nil) + expect(uri1).not_to equal(uri2) + end + + context 'with url_prefixed connection' do + let(:url) { 'http://httpbingo.org/get/' } + + it 'parses url and changes scheme' do + conn.scheme = 'https' + uri = conn.build_exclusive_url('sake.html') + expect(uri.to_s).to eq('https://httpbingo.org/get/sake.html') + end + + it 'joins url to base with ending slash' do + uri = conn.build_exclusive_url('sake.html') + expect(uri.to_s).to eq('http://httpbingo.org/get/sake.html') + end + + it 'used default base with ending slash' do + uri = conn.build_exclusive_url + expect(uri.to_s).to eq('http://httpbingo.org/get/') + end + + it 'overrides base' do + uri = conn.build_exclusive_url('/sake/') + expect(uri.to_s).to eq('http://httpbingo.org/sake/') + end + end + + context 'with colon in path' do + let(:url) { 'http://service.com' } + + it 'joins url to base when used absolute path' do + conn = Faraday.new(url: url) + uri = conn.build_exclusive_url('/service:search?limit=400') + expect(uri.to_s).to eq('http://service.com/service:search?limit=400') + end + + it 'joins url to base when used relative path' do + conn = Faraday.new(url: url) + uri = conn.build_exclusive_url('service:search?limit=400') + expect(uri.to_s).to eq('http://service.com/service:search?limit=400') + end + + it 'joins url to base when used with path prefix' do + conn = Faraday.new(url: url) + conn.path_prefix = '/api' + uri = conn.build_exclusive_url('service:search?limit=400') + expect(uri.to_s).to eq('http://service.com/api/service:search?limit=400') + end + end + + context 'with a custom `default_uri_parser`' do + let(:url) { 'http://httpbingo.org' } + let(:parser) { Addressable::URI } + + around do |example| + with_default_uri_parser(parser) do + example.run + end + end + + it 'does not raise error' do + expect { conn.build_exclusive_url('/nigiri') }.not_to raise_error + end + end + end + + describe '#build_url' do + let(:url) { 'http://httpbingo.org/nigiri' } + + it 'uses params' do + conn.params = { a: 1, b: 1 } + expect(conn.build_url.to_s).to eq('http://httpbingo.org/nigiri?a=1&b=1') + end + + it 'merges params' do + conn.params = { a: 1, b: 1 } + url = conn.build_url(nil, b: 2, c: 3) + expect(url.to_s).to eq('http://httpbingo.org/nigiri?a=1&b=2&c=3') + end + end + + describe '#build_request' do + let(:url) { 'https://ahttpbingo.org/sake.html' } + let(:request) { conn.build_request(:get) } + + before do + conn.headers = { 'Authorization' => 'token abc123' } + request.headers.delete('Authorization') + end + + it { expect(conn.headers.keys).to eq(['Authorization']) } + it { expect(conn.headers.include?('Authorization')).to be_truthy } + it { 
expect(request.headers.keys).to be_empty } + it { expect(request.headers.include?('Authorization')).to be_falsey } + end + + describe '#to_env' do + subject { conn.build_request(:get).to_env(conn).url } + + let(:url) { 'http://httpbingo.org/sake.html' } + let(:options) { { params: @params } } + + it 'parses url params into query' do + @params = { 'a[b]' => '1 + 2' } + expect(subject.query).to eq('a%5Bb%5D=1+%2B+2') + end + + it 'escapes per spec' do + @params = { 'a' => '1+2 foo~bar.-baz' } + expect(subject.query).to eq('a=1%2B2+foo~bar.-baz') + end + + it 'bracketizes nested params in query' do + @params = { 'a' => { 'b' => 'c' } } + expect(subject.query).to eq('a%5Bb%5D=c') + end + + it 'bracketizes repeated params in query' do + @params = { 'a' => [1, 2] } + expect(subject.query).to eq('a%5B%5D=1&a%5B%5D=2') + end + + it 'without braketizing repeated params in query' do + @params = { 'a' => [1, 2] } + conn.options.params_encoder = Faraday::FlatParamsEncoder + expect(subject.query).to eq('a=1&a=2') + end + end + + describe 'proxy support' do + it 'accepts string' do + with_env 'http_proxy' => 'http://env-proxy.com:80' do + conn.proxy = 'http://proxy.com' + expect(conn.proxy.host).to eq('proxy.com') + end + end + + it 'accepts uri' do + with_env 'http_proxy' => 'http://env-proxy.com:80' do + conn.proxy = URI.parse('http://proxy.com') + expect(conn.proxy.host).to eq('proxy.com') + end + end + + it 'accepts hash with string uri' do + with_env 'http_proxy' => 'http://env-proxy.com:80' do + conn.proxy = { uri: 'http://proxy.com', user: 'rick' } + expect(conn.proxy.host).to eq('proxy.com') + expect(conn.proxy.user).to eq('rick') + end + end + + it 'accepts hash' do + with_env 'http_proxy' => 'http://env-proxy.com:80' do + conn.proxy = { uri: URI.parse('http://proxy.com'), user: 'rick' } + expect(conn.proxy.host).to eq('proxy.com') + expect(conn.proxy.user).to eq('rick') + end + end + + it 'accepts http env' do + with_env 'http_proxy' => 'http://env-proxy.com:80' do + expect(conn.proxy.host).to eq('env-proxy.com') + end + end + + it 'accepts http env with auth' do + with_env 'http_proxy' => 'http://a%40b:my%20pass@proxy.com:80' do + expect(conn.proxy.user).to eq('a@b') + expect(conn.proxy.password).to eq('my pass') + end + end + + it 'accepts env without scheme' do + with_env 'http_proxy' => 'localhost:8888' do + uri = conn.proxy[:uri] + expect(uri.host).to eq('localhost') + expect(uri.port).to eq(8888) + end + end + + it 'fetches no proxy from nil env' do + with_env 'http_proxy' => nil do + expect(conn.proxy).to be_nil + end + end + + it 'fetches no proxy from blank env' do + with_env 'http_proxy' => '' do + expect(conn.proxy).to be_nil + end + end + + it 'does not accept uppercase env' do + with_env 'HTTP_PROXY' => 'http://localhost:8888/' do + expect(conn.proxy).to be_nil + end + end + + it 'allows when url in no proxy list' do + with_env 'http_proxy' => 'http://proxy.com', 'no_proxy' => 'example.com' do + conn = Faraday::Connection.new('http://example.com') + expect(conn.proxy).to be_nil + end + end + + it 'allows when url in no proxy list with url_prefix' do + with_env 'http_proxy' => 'http://proxy.com', 'no_proxy' => 'example.com' do + conn = Faraday::Connection.new + conn.url_prefix = 'http://example.com' + expect(conn.proxy).to be_nil + end + end + + it 'allows when prefixed url is not in no proxy list' do + with_env 'http_proxy' => 'http://proxy.com', 'no_proxy' => 'example.com' do + conn = Faraday::Connection.new('http://prefixedexample.com') + expect(conn.proxy.host).to 
eq('proxy.com') + end + end + + it 'allows when subdomain url is in no proxy list' do + with_env 'http_proxy' => 'http://proxy.com', 'no_proxy' => 'example.com' do + conn = Faraday::Connection.new('http://subdomain.example.com') + expect(conn.proxy).to be_nil + end + end + + it 'allows when url not in no proxy list' do + with_env 'http_proxy' => 'http://proxy.com', 'no_proxy' => 'example2.com' do + conn = Faraday::Connection.new('http://example.com') + expect(conn.proxy.host).to eq('proxy.com') + end + end + + it 'allows when ip address is not in no proxy list but url is' do + with_env 'http_proxy' => 'http://proxy.com', 'no_proxy' => 'localhost' do + conn = Faraday::Connection.new('http://127.0.0.1') + expect(conn.proxy).to be_nil + end + end + + it 'allows when url is not in no proxy list but ip address is' do + with_env 'http_proxy' => 'http://proxy.com', 'no_proxy' => '127.0.0.1' do + conn = Faraday::Connection.new('http://localhost') + expect(conn.proxy).to be_nil + end + end + + it 'allows in multi element no proxy list' do + with_env 'http_proxy' => 'http://proxy.com', 'no_proxy' => 'example0.com,example.com,example1.com' do + expect(Faraday::Connection.new('http://example0.com').proxy).to be_nil + expect(Faraday::Connection.new('http://example.com').proxy).to be_nil + expect(Faraday::Connection.new('http://example1.com').proxy).to be_nil + expect(Faraday::Connection.new('http://example2.com').proxy.host).to eq('proxy.com') + end + end + + it 'test proxy requires uri' do + expect { conn.proxy = { uri: :bad_uri, user: 'rick' } }.to raise_error(ArgumentError) + end + + it 'uses env http_proxy' do + with_env 'http_proxy' => 'http://proxy.com' do + conn = Faraday.new + expect(conn.instance_variable_get(:@manual_proxy)).to be_falsey + expect(conn.proxy_for_request('http://google.co.uk').host).to eq('proxy.com') + end + end + + it 'uses processes no_proxy before http_proxy' do + with_env 'http_proxy' => 'http://proxy.com', 'no_proxy' => 'google.co.uk' do + conn = Faraday.new + expect(conn.instance_variable_get(:@manual_proxy)).to be_falsey + expect(conn.proxy_for_request('http://google.co.uk')).to be_nil + end + end + + it 'uses env https_proxy' do + with_env 'https_proxy' => 'https://proxy.com' do + conn = Faraday.new + expect(conn.instance_variable_get(:@manual_proxy)).to be_falsey + expect(conn.proxy_for_request('https://google.co.uk').host).to eq('proxy.com') + end + end + + it 'uses processes no_proxy before https_proxy' do + with_env 'https_proxy' => 'https://proxy.com', 'no_proxy' => 'google.co.uk' do + conn = Faraday.new + expect(conn.instance_variable_get(:@manual_proxy)).to be_falsey + expect(conn.proxy_for_request('https://google.co.uk')).to be_nil + end + end + + it 'gives priority to manually set proxy' do + with_env 'https_proxy' => 'https://proxy.com', 'no_proxy' => 'google.co.uk' do + conn = Faraday.new + conn.proxy = 'http://proxy2.com' + + expect(conn.instance_variable_get(:@manual_proxy)).to be_truthy + expect(conn.proxy_for_request('https://google.co.uk').host).to eq('proxy2.com') + end + end + + it 'ignores env proxy if set that way' do + with_env_proxy_disabled do + with_env 'http_proxy' => 'http://duncan.proxy.com:80' do + expect(conn.proxy).to be_nil + end + end + end + + context 'performing a request' do + let(:url) { 'http://example.com' } + let(:conn) do + Faraday.new do |f| + f.adapter :test do |stubs| + stubs.get(url) do + [200, {}, 'ok'] + end + end + end + end + + it 'dynamically checks proxy' do + with_env 'http_proxy' => 'http://proxy.com:80' do + 
expect(conn.proxy.uri.host).to eq('proxy.com') + + conn.get(url) do |req| + expect(req.options.proxy.uri.host).to eq('proxy.com') + end + end + + conn.get(url) + expect(conn.instance_variable_get(:@temp_proxy)).to be_nil + end + + it 'dynamically check no proxy' do + with_env 'http_proxy' => 'http://proxy.com', 'no_proxy' => 'example.com' do + expect(conn.proxy.uri.host).to eq('proxy.com') + + conn.get('http://example.com') do |req| + expect(req.options.proxy).to be_nil + end + end + end + end + end + + describe '#dup' do + subject { conn.dup } + + let(:url) { 'http://httpbingo.org/foo' } + let(:options) do + { + ssl: { verify: :none }, + headers: { 'content-type' => 'text/plain' }, + params: { 'a' => '1' }, + request: { timeout: 5 } + } + end + + it { expect(subject.build_exclusive_url).to eq(conn.build_exclusive_url) } + it { expect(subject.headers['content-type']).to eq('text/plain') } + it { expect(subject.params['a']).to eq('1') } + + context 'after manual changes' do + before do + subject.headers['content-length'] = 12 + subject.params['b'] = '2' + subject.options[:open_timeout] = 10 + end + + it { expect(subject.builder.handlers.size).to eq(1) } + it { expect(conn.builder.handlers.size).to eq(1) } + it { expect(conn.headers.key?('content-length')).to be_falsey } + it { expect(conn.params.key?('b')).to be_falsey } + it { expect(subject.options[:timeout]).to eq(5) } + it { expect(conn.options[:open_timeout]).to be_nil } + end + end + + describe '#respond_to?' do + it { expect(Faraday.respond_to?(:get)).to be_truthy } + it { expect(Faraday.respond_to?(:post)).to be_truthy } + end + + describe 'default_connection_options' do + context 'assigning a default value' do + before do + Faraday.default_connection_options = nil + Faraday.default_connection_options.request.timeout = 10 + end + + it_behaves_like 'default connection options' + end + + context 'assigning a hash' do + before { Faraday.default_connection_options = { request: { timeout: 10 } } } + + it_behaves_like 'default connection options' + end + + context 'preserving a user_agent assigned via default_conncetion_options' do + around do |example| + old = Faraday.default_connection_options + Faraday.default_connection_options = { headers: { user_agent: 'My Agent 1.2' } } + example.run + Faraday.default_connection_options = old + end + + context 'when url is a Hash' do + let(:conn) { Faraday.new(url: 'http://example.co', headers: { 'CustomHeader' => 'CustomValue' }) } + + it { expect(conn.headers).to eq('CustomHeader' => 'CustomValue', 'User-Agent' => 'My Agent 1.2') } + end + + context 'when url is a String' do + let(:conn) { Faraday.new('http://example.co', headers: { 'CustomHeader' => 'CustomValue' }) } + + it { expect(conn.headers).to eq('CustomHeader' => 'CustomValue', 'User-Agent' => 'My Agent 1.2') } + end + end + end + + describe 'request params' do + context 'with simple url' do + let(:url) { 'http://example.com' } + let(:stubs) { Faraday::Adapter::Test::Stubs.new } + + before do + conn.adapter(:test, stubs) + stubs.get('http://example.com?a=a&p=3') do + [200, {}, 'ok'] + end + end + + after { stubs.verify_stubbed_calls } + + it 'test_overrides_request_params' do + conn.get('?p=2&a=a', p: 3) + end + + it 'test_overrides_request_params_block' do + conn.get('?p=1&a=a', p: 2) do |req| + req.params[:p] = 3 + end + end + + it 'test_overrides_request_params_block_url' do + conn.get(nil, p: 2) do |req| + req.url('?p=1&a=a', 'p' => 3) + end + end + end + + context 'with url and extra params' do + let(:url) { 
'http://example.com?a=1&b=2' } + let(:options) { { params: { c: 3 } } } + let(:stubs) { Faraday::Adapter::Test::Stubs.new } + + before do + conn.adapter(:test, stubs) + end + + it 'merges connection and request params' do + expected = 'http://example.com?a=1&b=2&c=3&limit=5&page=1' + stubs.get(expected) { [200, {}, 'ok'] } + conn.get('?page=1', limit: 5) + stubs.verify_stubbed_calls + end + + it 'allows to override all params' do + expected = 'http://example.com?b=b' + stubs.get(expected) { [200, {}, 'ok'] } + conn.get('?p=1&a=a', p: 2) do |req| + expect(req.params[:a]).to eq('a') + expect(req.params['c']).to eq(3) + expect(req.params['p']).to eq(2) + req.params = { b: 'b' } + expect(req.params['b']).to eq('b') + end + stubs.verify_stubbed_calls + end + + it 'allows to set params_encoder for single request' do + encoder = CustomEncoder.new + expected = 'http://example.com/?A-1,B-2,C-3,FEELING-BLUE' + stubs.get(expected) { [200, {}, 'ok'] } + + conn.get('/', a: 1, b: 2, c: 3, feeling: 'blue') do |req| + req.options.params_encoder = encoder + end + stubs.verify_stubbed_calls + end + end + + context 'with default params encoder' do + let(:stubs) { Faraday::Adapter::Test::Stubs.new } + + before do + conn.adapter(:test, stubs) + stubs.get('http://example.com?color%5B%5D=blue&color%5B%5D=red') do + [200, {}, 'ok'] + end + end + + after { stubs.verify_stubbed_calls } + + it 'supports array params in url' do + conn.get('http://example.com?color[]=blue&color[]=red') + end + + it 'supports array params in params' do + conn.get('http://example.com', color: %w[blue red]) + end + end + + context 'with flat params encoder' do + let(:options) { { request: { params_encoder: Faraday::FlatParamsEncoder } } } + let(:stubs) { Faraday::Adapter::Test::Stubs.new } + + before do + conn.adapter(:test, stubs) + stubs.get('http://example.com?color=blue&color=red') do + [200, {}, 'ok'] + end + end + + after { stubs.verify_stubbed_calls } + + it 'supports array params in params' do + conn.get('http://example.com', color: %w[blue red]) + end + + context 'with array param in url' do + let(:url) { 'http://example.com?color[]=blue&color[]=red' } + + it do + conn.get('/') + end + end + end + end +end diff --git a/vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/spec/faraday/error_spec.rb b/vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/spec/faraday/error_spec.rb new file mode 100644 index 000000000..bb5007713 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/spec/faraday/error_spec.rb @@ -0,0 +1,175 @@ +# frozen_string_literal: true + +RSpec.describe Faraday::Error do + describe '.initialize' do + subject { described_class.new(exception, response) } + let(:response) { nil } + + context 'with exception only' do + let(:exception) { RuntimeError.new('test') } + + it { expect(subject.wrapped_exception).to eq(exception) } + it { expect(subject.response).to be_nil } + it { expect(subject.message).to eq(exception.message) } + it { expect(subject.backtrace).to eq(exception.backtrace) } + it { expect(subject.inspect).to eq('#>') } + it { expect(subject.response_status).to be_nil } + it { expect(subject.response_headers).to be_nil } + it { expect(subject.response_body).to be_nil } + end + + context 'with response hash' do + let(:exception) { { status: 400 } } + + it { expect(subject.wrapped_exception).to be_nil } + it { expect(subject.response).to eq(exception) } + it { expect(subject.message).to eq('the server responded with status 400 - method and url are not available due to include_request: false on 
Faraday::Response::RaiseError middleware') } + if RUBY_VERSION >= '3.4' + it { expect(subject.inspect).to eq('#') } + else + it { expect(subject.inspect).to eq('#400}>') } + end + it { expect(subject.response_status).to eq(400) } + it { expect(subject.response_headers).to be_nil } + it { expect(subject.response_body).to be_nil } + end + + context 'with string' do + let(:exception) { 'custom message' } + + it { expect(subject.wrapped_exception).to be_nil } + it { expect(subject.response).to be_nil } + it { expect(subject.message).to eq('custom message') } + it { expect(subject.inspect).to eq('#>') } + it { expect(subject.response_status).to be_nil } + it { expect(subject.response_headers).to be_nil } + it { expect(subject.response_body).to be_nil } + end + + context 'with anything else #to_s' do + let(:exception) { %w[error1 error2] } + + it { expect(subject.wrapped_exception).to be_nil } + it { expect(subject.response).to be_nil } + it { expect(subject.message).to eq('["error1", "error2"]') } + it { expect(subject.inspect).to eq('#>') } + it { expect(subject.response_status).to be_nil } + it { expect(subject.response_headers).to be_nil } + it { expect(subject.response_body).to be_nil } + end + + context 'with exception string and response hash' do + let(:exception) { 'custom message' } + let(:response) { { status: 400 } } + + it { expect(subject.wrapped_exception).to be_nil } + it { expect(subject.response).to eq(response) } + it { expect(subject.message).to eq('custom message') } + if RUBY_VERSION >= '3.4' + it { expect(subject.inspect).to eq('#') } + else + it { expect(subject.inspect).to eq('#400}>') } + end + it { expect(subject.response_status).to eq(400) } + it { expect(subject.response_headers).to be_nil } + it { expect(subject.response_body).to be_nil } + end + + context 'with exception and response object' do + let(:exception) { RuntimeError.new('test') } + let(:body) { { test: 'test' } } + let(:headers) { { 'Content-Type' => 'application/json' } } + let(:response) { Faraday::Response.new(status: 400, response_headers: headers, response_body: body) } + + it { expect(subject.wrapped_exception).to eq(exception) } + it { expect(subject.response).to eq(response) } + it { expect(subject.message).to eq(exception.message) } + it { expect(subject.backtrace).to eq(exception.backtrace) } + it { expect(subject.response_status).to eq(400) } + it { expect(subject.response_headers).to eq(headers) } + it { expect(subject.response_body).to eq(body) } + end + + context 'with hash missing status key' do + let(:exception) { { body: 'error body' } } + + it { expect(subject.wrapped_exception).to be_nil } + it { expect(subject.response).to eq(exception) } + it { expect(subject.message).to eq('the server responded with status - method and url are not available due to include_request: false on Faraday::Response::RaiseError middleware') } + end + + context 'with hash with status but missing request data' do + let(:exception) { { status: 404, body: 'not found' } } # missing request key + + it { expect(subject.wrapped_exception).to be_nil } + it { expect(subject.response).to eq(exception) } + it { expect(subject.message).to eq('the server responded with status 404 - method and url are not available due to include_request: false on Faraday::Response::RaiseError middleware') } + end + + context 'with hash with status and request but missing method in request' do + let(:exception) { { status: 404, body: 'not found', request: { url: 'http://example.com/test' } } } # missing method + + it { 
expect(subject.wrapped_exception).to be_nil } + it { expect(subject.response).to eq(exception) } + it { expect(subject.message).to eq('the server responded with status 404 for http://example.com/test') } + end + + context 'with hash with status and request but missing url in request' do + let(:exception) { { status: 404, body: 'not found', request: { method: :get } } } # missing url + + it { expect(subject.wrapped_exception).to be_nil } + it { expect(subject.response).to eq(exception) } + it { expect(subject.message).to eq('the server responded with status 404 for GET ') } + end + + context 'with properly formed Faraday::Env' do + # This represents the normal case - a well-formed Faraday::Env object + # with all the standard properties populated as they would be during + # a typical HTTP request/response cycle + let(:exception) { Faraday::Env.new } + + before do + exception.status = 500 + exception.method = :post + exception.url = URI('https://api.example.com/users') + exception.request = Faraday::RequestOptions.new + exception.response_headers = { 'content-type' => 'application/json' } + exception.response_body = '{"error": "Internal server error"}' + exception.request_headers = { 'authorization' => 'Bearer token123' } + exception.request_body = '{"name": "John"}' + end + + it { expect(subject.wrapped_exception).to be_nil } + it { expect(subject.response).to eq(exception) } + it { expect(subject.message).to eq('the server responded with status 500 for POST https://api.example.com/users') } + end + + context 'with Faraday::Env missing status key' do + let(:exception) { Faraday::Env.new } + + before do + exception[:body] = 'error body' + # Intentionally not setting status + end + + it { expect(subject.wrapped_exception).to be_nil } + it { expect(subject.response).to eq(exception) } + it { expect(subject.message).to eq('the server responded with status for ') } + end + + context 'with Faraday::Env with direct method and url properties' do + let(:exception) { Faraday::Env.new } + + before do + exception.status = 404 + exception.method = :get + exception.url = URI('http://example.com/test') + exception[:body] = 'not found' + end + + it { expect(subject.wrapped_exception).to be_nil } + it { expect(subject.response).to eq(exception) } + it { expect(subject.message).to eq('the server responded with status 404 for GET http://example.com/test') } + end + end +end diff --git a/vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/spec/faraday/middleware_registry_spec.rb b/vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/spec/faraday/middleware_registry_spec.rb new file mode 100644 index 000000000..a8fa7cc79 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/spec/faraday/middleware_registry_spec.rb @@ -0,0 +1,31 @@ +# frozen_string_literal: true + +RSpec.describe Faraday::MiddlewareRegistry do + before do + stub_const('CustomMiddleware', custom_middleware_klass) + end + let(:custom_middleware_klass) { Class.new(Faraday::Middleware) } + let(:dummy) { Class.new { extend Faraday::MiddlewareRegistry } } + + after { dummy.unregister_middleware(:custom) } + + it 'allows to register with constant' do + dummy.register_middleware(custom: custom_middleware_klass) + expect(dummy.lookup_middleware(:custom)).to eq(custom_middleware_klass) + end + + it 'allows to register with symbol' do + dummy.register_middleware(custom: :CustomMiddleware) + expect(dummy.lookup_middleware(:custom)).to eq(custom_middleware_klass) + end + + it 'allows to register with string' do + dummy.register_middleware(custom: 
'CustomMiddleware') + expect(dummy.lookup_middleware(:custom)).to eq(custom_middleware_klass) + end + + it 'allows to register with Proc' do + dummy.register_middleware(custom: -> { custom_middleware_klass }) + expect(dummy.lookup_middleware(:custom)).to eq(custom_middleware_klass) + end +end diff --git a/vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/spec/faraday/middleware_spec.rb b/vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/spec/faraday/middleware_spec.rb new file mode 100644 index 000000000..da1d36858 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/spec/faraday/middleware_spec.rb @@ -0,0 +1,213 @@ +# frozen_string_literal: true + +RSpec.describe Faraday::Middleware do + subject { described_class.new(app) } + let(:app) { double } + + describe 'options' do + context 'when options are passed to the middleware' do + subject { described_class.new(app, options) } + let(:options) { { field: 'value' } } + + it 'accepts options when initialized' do + expect(subject.options[:field]).to eq('value') + end + end + end + + describe '#on_request' do + subject do + Class.new(described_class) do + def on_request(env) + # do nothing + end + end.new(app) + end + + it 'is called by #call' do + expect(app).to receive(:call).and_return(app) + expect(app).to receive(:on_complete) + is_expected.to receive(:call).and_call_original + is_expected.to receive(:on_request) + subject.call(double) + end + end + + describe '#on_error' do + subject do + Class.new(described_class) do + def on_error(error) + # do nothing + end + end.new(app) + end + + it 'is called by #call' do + expect(app).to receive(:call).and_raise(Faraday::ConnectionFailed) + is_expected.to receive(:call).and_call_original + is_expected.to receive(:on_error) + + expect { subject.call(double) }.to raise_error(Faraday::ConnectionFailed) + end + end + + describe '#close' do + context "with app that doesn't support \#close" do + it 'should issue warning' do + is_expected.to receive(:warn) + subject.close + end + end + + context "with app that supports \#close" do + it 'should issue warning' do + expect(app).to receive(:close) + is_expected.to_not receive(:warn) + subject.close + end + end + end + + describe '::default_options' do + let(:subclass_no_options) { FaradayMiddlewareSubclasses::SubclassNoOptions } + let(:subclass_one_option) { FaradayMiddlewareSubclasses::SubclassOneOption } + let(:subclass_two_options) { FaradayMiddlewareSubclasses::SubclassTwoOptions } + + def build_conn(resp_middleware) + Faraday.new do |c| + c.adapter :test do |stub| + stub.get('/success') { [200, {}, 'ok'] } + end + c.response resp_middleware + end + end + + RSpec.shared_context 'reset @default_options' do + before(:each) do + FaradayMiddlewareSubclasses::SubclassNoOptions.instance_variable_set(:@default_options, nil) + FaradayMiddlewareSubclasses::SubclassOneOption.instance_variable_set(:@default_options, nil) + FaradayMiddlewareSubclasses::SubclassTwoOptions.instance_variable_set(:@default_options, nil) + Faraday::Middleware.instance_variable_set(:@default_options, nil) + end + end + + after(:all) do + FaradayMiddlewareSubclasses::SubclassNoOptions.instance_variable_set(:@default_options, nil) + FaradayMiddlewareSubclasses::SubclassOneOption.instance_variable_set(:@default_options, nil) + FaradayMiddlewareSubclasses::SubclassTwoOptions.instance_variable_set(:@default_options, nil) + Faraday::Middleware.instance_variable_set(:@default_options, nil) + end + + context 'with subclass DEFAULT_OPTIONS defined' do + include_context 'reset @default_options' + + 
context 'and without application options configured' do + let(:resp1) { build_conn(:one_option).get('/success') } + + it 'has only subclass defaults' do + expect(Faraday::Middleware.default_options).to eq(Faraday::Middleware::DEFAULT_OPTIONS) + expect(subclass_no_options.default_options).to eq(subclass_no_options::DEFAULT_OPTIONS) + expect(subclass_one_option.default_options).to eq(subclass_one_option::DEFAULT_OPTIONS) + expect(subclass_two_options.default_options).to eq(subclass_two_options::DEFAULT_OPTIONS) + end + + it { expect(resp1.body).to eq('ok') } + end + + context "and with one application's options changed" do + let(:resp2) { build_conn(:two_options).get('/success') } + + before(:each) do + FaradayMiddlewareSubclasses::SubclassTwoOptions.default_options = { some_option: false } + end + + it 'only updates default options of target subclass' do + expect(Faraday::Middleware.default_options).to eq(Faraday::Middleware::DEFAULT_OPTIONS) + expect(subclass_no_options.default_options).to eq(subclass_no_options::DEFAULT_OPTIONS) + expect(subclass_one_option.default_options).to eq(subclass_one_option::DEFAULT_OPTIONS) + expect(subclass_two_options.default_options).to eq({ some_option: false, some_other_option: false }) + end + + it { expect(resp2.body).to eq('ok') } + end + + context "and with two applications' options changed" do + let(:resp1) { build_conn(:one_option).get('/success') } + let(:resp2) { build_conn(:two_options).get('/success') } + + before(:each) do + FaradayMiddlewareSubclasses::SubclassOneOption.default_options = { some_other_option: true } + FaradayMiddlewareSubclasses::SubclassTwoOptions.default_options = { some_option: false } + end + + it 'updates subclasses and parent independent of each other' do + expect(Faraday::Middleware.default_options).to eq(Faraday::Middleware::DEFAULT_OPTIONS) + expect(subclass_no_options.default_options).to eq(subclass_no_options::DEFAULT_OPTIONS) + expect(subclass_one_option.default_options).to eq({ some_other_option: true }) + expect(subclass_two_options.default_options).to eq({ some_option: false, some_other_option: false }) + end + + it { expect(resp1.body).to eq('ok') } + it { expect(resp2.body).to eq('ok') } + end + end + + context 'with FARADAY::MIDDLEWARE DEFAULT_OPTIONS and with Subclass DEFAULT_OPTIONS' do + before(:each) do + stub_const('Faraday::Middleware::DEFAULT_OPTIONS', { its_magic: false }) + end + + # Must stub Faraday::Middleware::DEFAULT_OPTIONS before resetting default options + include_context 'reset @default_options' + + context 'and without application options configured' do + let(:resp1) { build_conn(:one_option).get('/success') } + + it 'has only subclass defaults' do + expect(Faraday::Middleware.default_options).to eq(Faraday::Middleware::DEFAULT_OPTIONS) + expect(FaradayMiddlewareSubclasses::SubclassNoOptions.default_options).to eq({ its_magic: false }) + expect(FaradayMiddlewareSubclasses::SubclassOneOption.default_options).to eq({ its_magic: false, some_other_option: false }) + expect(FaradayMiddlewareSubclasses::SubclassTwoOptions.default_options).to eq({ its_magic: false, some_option: true, some_other_option: false }) + end + + it { expect(resp1.body).to eq('ok') } + end + + context "and with two applications' options changed" do + let(:resp1) { build_conn(:one_option).get('/success') } + let(:resp2) { build_conn(:two_options).get('/success') } + + before(:each) do + FaradayMiddlewareSubclasses::SubclassOneOption.default_options = { some_other_option: true } + 
FaradayMiddlewareSubclasses::SubclassTwoOptions.default_options = { some_option: false } + end + + it 'updates subclasses and parent independent of each other' do + expect(Faraday::Middleware.default_options).to eq(Faraday::Middleware::DEFAULT_OPTIONS) + expect(FaradayMiddlewareSubclasses::SubclassNoOptions.default_options).to eq({ its_magic: false }) + expect(FaradayMiddlewareSubclasses::SubclassOneOption.default_options).to eq({ its_magic: false, some_other_option: true }) + expect(FaradayMiddlewareSubclasses::SubclassTwoOptions.default_options).to eq({ its_magic: false, some_option: false, some_other_option: false }) + end + + it { expect(resp1.body).to eq('ok') } + it { expect(resp2.body).to eq('ok') } + end + end + + describe 'default_options input validation' do + include_context 'reset @default_options' + + it 'raises error if Faraday::Middleware option does not exist' do + expect { Faraday::Middleware.default_options = { something_special: true } }.to raise_error(Faraday::InitializationError) do |e| + expect(e.message).to eq('Invalid options provided. Keys not found in Faraday::Middleware::DEFAULT_OPTIONS: something_special') + end + end + + it 'raises error if subclass option does not exist' do + expect { subclass_one_option.default_options = { this_is_a_typo: true } }.to raise_error(Faraday::InitializationError) do |e| + expect(e.message).to eq('Invalid options provided. Keys not found in FaradayMiddlewareSubclasses::SubclassOneOption::DEFAULT_OPTIONS: this_is_a_typo') + end + end + end + end +end diff --git a/vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/spec/faraday/options/env_spec.rb b/vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/spec/faraday/options/env_spec.rb new file mode 100644 index 000000000..006bd5fb3 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/spec/faraday/options/env_spec.rb @@ -0,0 +1,76 @@ +# frozen_string_literal: true + +RSpec.describe Faraday::Env do + subject(:env) { described_class.new } + + it 'allows to access members' do + expect(env.method).to be_nil + env.method = :get + expect(env.method).to eq(:get) + end + + it 'allows to access symbol non members' do + expect(env[:custom]).to be_nil + env[:custom] = :boom + expect(env[:custom]).to eq(:boom) + end + + it 'allows to access string non members' do + expect(env['custom']).to be_nil + env['custom'] = :boom + expect(env['custom']).to eq(:boom) + end + + it 'ignores false when fetching' do + ssl = Faraday::SSLOptions.new + ssl.verify = false + expect(ssl.fetch(:verify, true)).to be_falsey + end + + it 'handle verify_hostname when fetching' do + ssl = Faraday::SSLOptions.new + ssl.verify_hostname = true + expect(ssl.fetch(:verify_hostname, false)).to be_truthy + end + + it 'retains custom members' do + env[:foo] = 'custom 1' + env[:bar] = :custom2 + env2 = Faraday::Env.from(env) + env2[:baz] = 'custom 3' + + expect(env2[:foo]).to eq('custom 1') + expect(env2[:bar]).to eq(:custom2) + expect(env[:baz]).to be_nil + end + + describe '#body' do + subject(:env) { described_class.from(body: { foo: 'bar' }) } + + context 'when response is not finished yet' do + it 'returns the request body' do + expect(env.body).to eq(foo: 'bar') + end + end + + context 'when response is finished' do + before do + env.status = 200 + env.body = { bar: 'foo' } + env.response = Faraday::Response.new(env) + end + + it 'returns the response body' do + expect(env.body).to eq(bar: 'foo') + end + + it 'allows to access request_body' do + expect(env.request_body).to eq(foo: 'bar') + end + + it 'allows to access 
response_body' do + expect(env.response_body).to eq(bar: 'foo') + end + end + end +end diff --git a/vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/spec/faraday/options/options_spec.rb b/vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/spec/faraday/options/options_spec.rb new file mode 100644 index 000000000..fc0b117a8 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/spec/faraday/options/options_spec.rb @@ -0,0 +1,297 @@ +# frozen_string_literal: true + +RSpec.describe Faraday::Options do + SubOptions = Class.new(Faraday::Options.new(:sub_a, :sub_b)) + ParentOptions = Faraday::Options.new(:a, :b, :c) do + options c: SubOptions + end + + describe '#merge' do + it 'merges options with hashes' do + options = ParentOptions.new(1) + expect(options.a).to eq(1) + expect(options.b).to be_nil + + dup = options.merge a: 2, b: 3 + expect(dup.a).to eq(2) + expect(dup.b).to eq(3) + expect(options.a).to eq(1) + expect(options.b).to be_nil + end + + it 'deeply merges two options' do + sub_opts1 = SubOptions.from(sub_a: 3) + sub_opts2 = SubOptions.from(sub_b: 4) + opt1 = ParentOptions.from(a: 1, c: sub_opts1) + opt2 = ParentOptions.from(b: 2, c: sub_opts2) + + merged = opt1.merge(opt2) + + expected_sub_opts = SubOptions.from(sub_a: 3, sub_b: 4) + expected = ParentOptions.from(a: 1, b: 2, c: expected_sub_opts) + expect(merged).to eq(expected) + end + + it 'deeply merges options with hashes' do + sub_opts1 = SubOptions.from(sub_a: 3) + sub_opts2 = { sub_b: 4 } + opt1 = ParentOptions.from(a: 1, c: sub_opts1) + opt2 = { b: 2, c: sub_opts2 } + + merged = opt1.merge(opt2) + + expected_sub_opts = SubOptions.from(sub_a: 3, sub_b: 4) + expected = ParentOptions.from(a: 1, b: 2, c: expected_sub_opts) + expect(merged).to eq(expected) + end + + it 'deeply merges options with nil' do + sub_opts = SubOptions.new(3, 4) + options = ParentOptions.new(1, 2, sub_opts) + expect(options.a).to eq(1) + expect(options.b).to eq(2) + expect(options.c.sub_a).to eq(3) + expect(options.c.sub_b).to eq(4) + + options2 = ParentOptions.from(b: 5, c: nil) + + merged = options.merge(options2) + + expect(merged.b).to eq(5) + expect(merged.c).to eq(sub_opts) + end + + it 'deeply merges options with options having nil sub-options' do + options = ParentOptions.from(a: 1) + + sub_opts = SubOptions.new(3, 4) + options2 = ParentOptions.from(b: 2, c: sub_opts) + + expect(options.a).to eq(1) + expect(options2.b).to eq(2) + expect(options2.c.sub_a).to eq(3) + expect(options2.c.sub_b).to eq(4) + + merged = options.merge(options2) + + expect(merged.c).to eq(sub_opts) + end + + describe '#dup' do + it 'duplicate options but not sub-options' do + sub_opts = SubOptions.from(sub_a: 3) + opts = ParentOptions.from(b: 1, c: sub_opts) + + duped = opts.dup + duped.b = 2 + duped.c.sub_a = 4 + + expect(opts.b).to eq(1) + expect(opts.c.sub_a).to eq(4) + end + end + + describe '#deep_dup' do + it 'duplicate options and also suboptions' do + sub_opts = SubOptions.from(sub_a: 3) + opts = ParentOptions.from(b: 1, c: sub_opts) + + duped = opts.deep_dup + duped.b = 2 + duped.c.sub_a = 4 + + expect(opts.b).to eq(1) + expect(opts.c.sub_a).to eq(3) + end + end + + describe '#clear' do + it 'clears the options' do + options = SubOptions.new(1) + expect(options.empty?).not_to be_truthy + options.clear + expect(options.empty?).to be_truthy + end + end + + describe '#empty?' 
do + it 'returns true only if all options are nil' do + options = SubOptions.new + expect(options.empty?).to be_truthy + options.sub_a = 1 + expect(options.empty?).not_to be_truthy + options.delete(:sub_a) + expect(options.empty?).to be_truthy + end + end + + describe '#each_key' do + it 'allows to iterate through keys' do + options = ParentOptions.new(1, 2, 3) + enum = options.each_key + expect(enum.next.to_sym).to eq(:a) + expect(enum.next.to_sym).to eq(:b) + expect(enum.next.to_sym).to eq(:c) + end + end + + describe '#key?' do + it 'returns true if the key exists and is not nil' do + options = SubOptions.new + expect(options.key?(:sub_a)).not_to be_truthy + options.sub_a = 1 + expect(options.key?(:sub_a)).to be_truthy + end + end + + describe '#each_value' do + it 'allows to iterate through values' do + options = ParentOptions.new(1, 2, 3) + enum = options.each_value + expect(enum.next).to eq(1) + expect(enum.next).to eq(2) + expect(enum.next).to eq(3) + end + end + + describe '#value?' do + it 'returns true if any key has that value' do + options = SubOptions.new + expect(options.value?(1)).not_to be_truthy + options.sub_a = 1 + expect(options.value?(1)).to be_truthy + end + end + + describe '#update' do + it 'updates options from hashes' do + options = ParentOptions.new(1) + expect(options.a).to eq(1) + expect(options.b).to be_nil + + updated = options.update a: 2, b: 3 + expect(options.a).to eq(2) + expect(options.b).to eq(3) + expect(updated).to eq(options) + end + end + + describe '#delete' do + it 'allows to remove value for key' do + options = ParentOptions.new(1) + expect(options.a).to eq(1) + expect(options.delete(:a)).to eq(1) + expect(options.a).to be_nil + end + end + + describe '#from' do + it { expect { ParentOptions.from invalid: 1 }.to raise_error(NoMethodError) } + + it 'works with options' do + options = ParentOptions.new(1) + + value = ParentOptions.from(options) + expect(value.a).to eq(1) + expect(value.b).to be_nil + end + + it 'works with options with sub object' do + sub = SubOptions.new(1) + options = ParentOptions.from a: 1, c: sub + expect(options).to be_a_kind_of(ParentOptions) + expect(options.a).to eq(1) + expect(options.b).to be_nil + expect(options.c).to be_a_kind_of(SubOptions) + expect(options.c.sub_a).to eq(1) + end + + it 'works with hash' do + options = ParentOptions.from a: 1 + expect(options).to be_a_kind_of(ParentOptions) + expect(options.a).to eq(1) + expect(options.b).to be_nil + end + + it 'works with hash with sub object' do + options = ParentOptions.from a: 1, c: { sub_a: 1 } + expect(options).to be_a_kind_of(ParentOptions) + expect(options.a).to eq(1) + expect(options.b).to be_nil + expect(options.c).to be_a_kind_of(SubOptions) + expect(options.c.sub_a).to eq(1) + end + + it 'works with deep hash' do + hash = { b: 1 } + options = ParentOptions.from a: hash + expect(options.a[:b]).to eq(1) + + hash[:b] = 2 + expect(options.a[:b]).to eq(1) + + options.a[:b] = 3 + expect(hash[:b]).to eq(2) + expect(options.a[:b]).to eq(3) + end + + it 'works with nil' do + options = ParentOptions.from(nil) + expect(options).to be_a_kind_of(ParentOptions) + expect(options.a).to be_nil + expect(options.b).to be_nil + end + + it 'respects inheritance' do + subclass = Class.new(ParentOptions) + options = subclass.from(c: { sub_a: 'hello' }) + expect(options.c).to be_a_kind_of(SubOptions) + expect(options.c.sub_a).to eq('hello') + end + end + + describe '#memoized' do + subject(:options_class) { Class.new(ParentOptions) } + it 'requires block' do + expect { 
options_class.memoized(:a) }.to raise_error(ArgumentError) + end + + it 'accepts block' do + options_class.memoized(:a) { :foo } + expect(options_class.new.a).to eql(:foo) + end + end + + describe '#fetch' do + subject { SubOptions.new } + + context 'when the fetched key has no value' do + it 'uses falsey default' do + expect(subject.fetch(:sub_a, false) { |_| :blah }).to be_falsey + end + + it 'accepts block' do + expect(subject.fetch(:sub_a) { |k| "yo #{k.inspect}" }).to eq('yo :sub_a') + end + + it 'needs a default if key is missing' do + expect { subject.fetch(:sub_a) }.to raise_error(Faraday::Options.fetch_error_class) + end + end + + context 'when the fetched key has a value' do + before do + subject.sub_a = 1 + end + + it 'grabs value' do + expect(subject.fetch(:sub_a, false) { |_| :blah }).to eq(1) + end + + it 'works with key' do + expect(subject.fetch(:sub_a)).to eq(1) + end + end + end + end +end diff --git a/vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/spec/faraday/options/proxy_options_spec.rb b/vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/spec/faraday/options/proxy_options_spec.rb new file mode 100644 index 000000000..15203edb3 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/spec/faraday/options/proxy_options_spec.rb @@ -0,0 +1,79 @@ +# frozen_string_literal: true +
+RSpec.describe Faraday::ProxyOptions do + describe '#from' do + it 'works with string' do + options = Faraday::ProxyOptions.from 'http://user:pass@example.org' + expect(options.user).to eq('user') + expect(options.password).to eq('pass') + expect(options.uri).to be_a_kind_of(URI) + expect(options.path).to eq('') + expect(options.port).to eq(80) + expect(options.host).to eq('example.org') + expect(options.scheme).to eq('http') + expect(options.inspect).to match('#<Faraday::ProxyOptions uri=') + end + + it 'works with hash' do + hash = { user: 'user', password: 'pass', uri: 'http://@example.org' } + options = Faraday::ProxyOptions.from(hash) + expect(options.user).to eq('user') + expect(options.password).to eq('pass') + expect(options.uri).to be_a_kind_of(URI) + expect(options.path).to eq('') + expect(options.port).to eq(80) + expect(options.host).to eq('example.org') + expect(options.scheme).to eq('http') + expect(options.inspect).to match('#<Faraday::ProxyOptions uri=') + end + + it 'treats empty string as nil' do + proxy = nil + proxy_string = proxy.to_s # => empty string + options = Faraday::ProxyOptions.from proxy_string + expect(options).to be_a_kind_of(Faraday::ProxyOptions) + expect(options.inspect).to eq('#<Faraday::ProxyOptions (empty)>') + end + end + + it 'allows hash access' do + proxy = Faraday::ProxyOptions.from 'http://a%40b:pw%20d@example.org' + expect(proxy.user).to eq('a@b') + expect(proxy[:user]).to eq('a@b') + expect(proxy.password).to eq('pw d') + expect(proxy[:password]).to eq('pw d') + end +end diff --git a/vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/spec/faraday/options/request_options_spec.rb b/vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/spec/faraday/options/request_options_spec.rb new file mode 100644 index 000000000..8c1bb9921 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/spec/faraday/options/request_options_spec.rb @@ -0,0 +1,19 @@ +# frozen_string_literal: true +
+RSpec.describe Faraday::RequestOptions do + subject(:options) { Faraday::RequestOptions.new } + + it 'allows to set the request proxy' do + expect(options.proxy).to be_nil + + expect { options[:proxy] = { booya: 1 } }.to raise_error(NoMethodError) + + options[:proxy] = { user: 'user' } + expect(options.proxy).to be_a_kind_of(Faraday::ProxyOptions) + expect(options.proxy.user).to eq('user') + + options.proxy = nil + expect(options.proxy).to be_nil +
expect(options.inspect).to eq('#<Faraday::RequestOptions (empty)>') + end +end diff --git a/vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/spec/faraday/params_encoders/flat_spec.rb b/vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/spec/faraday/params_encoders/flat_spec.rb new file mode 100644 index 000000000..115342e53 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/spec/faraday/params_encoders/flat_spec.rb @@ -0,0 +1,42 @@ +# frozen_string_literal: true +
+require 'rack/utils' +
+RSpec.describe Faraday::FlatParamsEncoder do + it_behaves_like 'a params encoder' + + it 'decodes arrays' do + query = 'a=one&a=two&a=three' + expected = { 'a' => %w[one two three] } + expect(subject.decode(query)).to eq(expected) + end + + it 'decodes boolean values' do + query = 'a=true&b=false' + expected = { 'a' => 'true', 'b' => 'false' } + expect(subject.decode(query)).to eq(expected) + end + + it 'encodes boolean values' do + params = { a: true, b: false } + expect(subject.encode(params)).to eq('a=true&b=false') + end + + it 'encodes boolean values in array' do + params = { a: [true, false] } + expect(subject.encode(params)).to eq('a=true&a=false') + end + + it 'encodes empty array in hash' do + params = { a: [] } + expect(subject.encode(params)).to eq('a=') + end + + it 'encodes unsorted when asked' do + params = { b: false, a: true } + expect(subject.encode(params)).to eq('a=true&b=false') + Faraday::FlatParamsEncoder.sort_params = false + expect(subject.encode(params)).to eq('b=false&a=true') + Faraday::FlatParamsEncoder.sort_params = true + end +end diff --git a/vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/spec/faraday/params_encoders/nested_spec.rb b/vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/spec/faraday/params_encoders/nested_spec.rb new file mode 100644 index 000000000..83da22dc4 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/spec/faraday/params_encoders/nested_spec.rb @@ -0,0 +1,151 @@ +# frozen_string_literal: true +
+require 'rack/utils' +
+RSpec.describe Faraday::NestedParamsEncoder do + it_behaves_like 'a params encoder' + + it 'decodes arrays' do + query = 'a[1]=one&a[2]=two&a[3]=three' + expected = { 'a' => %w[one two three] } + expect(subject.decode(query)).to eq(expected) + end + + it 'decodes hashes' do + query = 'a[b1]=one&a[b2]=two&a[b][c]=foo' + expected = { 'a' => { 'b1' => 'one', 'b2' => 'two', 'b' => { 'c' => 'foo' } } } + expect(subject.decode(query)).to eq(expected) + end + + it 'decodes nested arrays rack compat' do + query = 'a[][one]=1&a[][two]=2&a[][one]=3&a[][two]=4' + expected = Rack::Utils.parse_nested_query(query) + expect(subject.decode(query)).to eq(expected) + end + + it 'decodes nested array mixed types' do + query = 'a[][one]=1&a[]=2&a[]=&a[]' + expected = Rack::Utils.parse_nested_query(query) + expect(subject.decode(query)).to eq(expected) + end + + it 'decodes nested ignores invalid array' do + query = '[][a]=1&b=2' + expected = { 'a' => '1', 'b' => '2' } + expect(subject.decode(query)).to eq(expected) + end + + it 'decodes nested ignores repeated array notation' do + query = 'a[][][]=1' + expected = { 'a' => ['1'] } + expect(subject.decode(query)).to eq(expected) + end + + it 'decodes nested ignores malformed keys' do + query = '=1&[]=2' + expected = {} + expect(subject.decode(query)).to eq(expected) + end + + it 'decodes nested subkeys dont have to be in brackets' do + query = 'a[b]c[d]e=1' + expected = { 'a' => { 'b' => { 'c' => { 'd' => { 'e' => '1' } } } } } + expect(subject.decode(query)).to eq(expected) + end + + it 'decodes nested final value overrides any 
type' do + query = 'a[b][c]=1&a[b]=2' + expected = { 'a' => { 'b' => '2' } } + expect(subject.decode(query)).to eq(expected) + end + + it 'encodes rack compat' do + params = { a: [{ one: '1', two: '2' }, '3', ''] } + result = Faraday::Utils.unescape(Faraday::NestedParamsEncoder.encode(params)).split('&') + escaped = Rack::Utils.build_nested_query(params) + expected = Rack::Utils.unescape(escaped).split('&') + expect(result).to match_array(expected) + end + + it 'encodes empty string array value' do + expected = 'baz=&foo%5Bbar%5D=' + result = Faraday::NestedParamsEncoder.encode(foo: { bar: '' }, baz: '') + expect(result).to eq(expected) + end + + it 'encodes nil array value' do + expected = 'baz&foo%5Bbar%5D' + result = Faraday::NestedParamsEncoder.encode(foo: { bar: nil }, baz: nil) + expect(result).to eq(expected) + end + + it 'encodes empty array value' do + expected = 'baz%5B%5D&foo%5Bbar%5D%5B%5D' + result = Faraday::NestedParamsEncoder.encode(foo: { bar: [] }, baz: []) + expect(result).to eq(expected) + end + + it 'encodes boolean values' do + params = { a: true, b: false } + expect(subject.encode(params)).to eq('a=true&b=false') + end + + it 'encodes boolean values in array' do + params = { a: [true, false] } + expect(subject.encode(params)).to eq('a%5B%5D=true&a%5B%5D=false') + end + + it 'encodes unsorted when asked' do + params = { b: false, a: true } + expect(subject.encode(params)).to eq('a=true&b=false') + Faraday::NestedParamsEncoder.sort_params = false + expect(subject.encode(params)).to eq('b=false&a=true') + Faraday::NestedParamsEncoder.sort_params = true + end + + it 'encodes arrays indices when asked' do + params = { a: [0, 1, 2] } + expect(subject.encode(params)).to eq('a%5B%5D=0&a%5B%5D=1&a%5B%5D=2') + Faraday::NestedParamsEncoder.array_indices = true + expect(subject.encode(params)).to eq('a%5B0%5D=0&a%5B1%5D=1&a%5B2%5D=2') + Faraday::NestedParamsEncoder.array_indices = false + end + + shared_examples 'a wrong decoding' do + it do + expect { subject.decode(query) }.to raise_error(TypeError) do |e| + expect(e.message).to eq(error_message) + end + end + end + + context 'when expecting hash but getting string' do + let(:query) { 'a=1&a[b]=2' } + let(:error_message) { "expected Hash (got String) for param `a'" } + it_behaves_like 'a wrong decoding' + end + + context 'when expecting hash but getting array' do + let(:query) { 'a[]=1&a[b]=2' } + let(:error_message) { "expected Hash (got Array) for param `a'" } + it_behaves_like 'a wrong decoding' + end + + context 'when expecting nested hash but getting non nested' do + let(:query) { 'a[b]=1&a[b][c]=2' } + let(:error_message) { "expected Hash (got String) for param `b'" } + it_behaves_like 'a wrong decoding' + end + + context 'when expecting array but getting hash' do + let(:query) { 'a[b]=1&a[]=2' } + let(:error_message) { "expected Array (got Hash) for param `a'" } + it_behaves_like 'a wrong decoding' + end + + context 'when expecting array but getting string' do + let(:query) { 'a=1&a[]=2' } + let(:error_message) { "expected Array (got String) for param `a'" } + it_behaves_like 'a wrong decoding' + end +end diff --git a/vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/spec/faraday/rack_builder_spec.rb b/vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/spec/faraday/rack_builder_spec.rb new file mode 100644 index 000000000..89f17ca96 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/spec/faraday/rack_builder_spec.rb @@ -0,0 +1,317 @@ +# frozen_string_literal: true + +RSpec.describe Faraday::RackBuilder do + # mock 
handler classes + (Handler = Struct.new(:app)).class_eval do + def call(env) + env[:request_headers]['X-Middleware'] ||= '' + env[:request_headers]['X-Middleware'] += ":#{self.class.name.split('::').last}" + app.call(env) + end + end + + class Apple < Handler + end + + class Orange < Handler + end + + class Banana < Handler + end + + subject { conn.builder } + before { Faraday.default_adapter = :test } + after { Faraday.default_adapter = nil } + + context 'with default stack' do + let(:conn) { Faraday::Connection.new } + + it { expect(subject[0]).to eq(Faraday::Request.lookup_middleware(:url_encoded)) } + it { expect(subject.adapter).to eq(Faraday::Adapter.lookup_middleware(Faraday.default_adapter)) } + end + + context 'with custom empty block' do + let(:conn) { Faraday::Connection.new {} } + + it { expect(subject[0]).to be_nil } + it { expect(subject.adapter).to eq(Faraday::Adapter.lookup_middleware(Faraday.default_adapter)) } + end + + context 'with custom adapter only' do + let(:conn) do + Faraday::Connection.new do |builder| + builder.adapter :test do |stub| + stub.get('/') { |_| [200, {}, ''] } + end + end + end + + it { expect(subject[0]).to be_nil } + it { expect(subject.adapter).to eq(Faraday::Adapter.lookup_middleware(:test)) } + end + + context 'with custom handler and adapter' do + let(:conn) do + Faraday::Connection.new do |builder| + builder.use Apple + builder.adapter :test do |stub| + stub.get('/') { |_| [200, {}, ''] } + end + end + end + + it 'locks the stack after making a request' do + expect(subject.locked?).to be_falsey + conn.get('/') + expect(subject.locked?).to be_truthy + expect { subject.use(Orange) }.to raise_error(Faraday::RackBuilder::StackLocked) + end + + it 'dup stack is unlocked' do + expect(subject.locked?).to be_falsey + subject.lock! 
+ expect(subject.locked?).to be_truthy + dup = subject.dup + expect(dup).to eq(subject) + expect(dup.locked?).to be_falsey + end + + it 'allows to compare handlers' do + expect(subject.handlers.first).to eq(Faraday::RackBuilder::Handler.new(Apple)) + end + end + + context 'when having a single handler' do + let(:conn) { Faraday::Connection.new {} } + + before { subject.use(Apple) } + + it { expect(subject.handlers).to eq([Apple]) } + + it 'allows use' do + subject.use(Orange) + expect(subject.handlers).to eq([Apple, Orange]) + end + + it 'allows insert_before' do + subject.insert_before(Apple, Orange) + expect(subject.handlers).to eq([Orange, Apple]) + end + + it 'allows insert_after' do + subject.insert_after(Apple, Orange) + expect(subject.handlers).to eq([Apple, Orange]) + end + + it 'raises an error trying to use an unregistered symbol' do + expect { subject.use(:apple) }.to raise_error(Faraday::Error) do |err| + expect(err.message).to eq(':apple is not registered on Faraday::Middleware') + end + end + end + + context 'when having two handlers' do + let(:conn) { Faraday::Connection.new {} } + + before do + subject.use(Apple) + subject.use(Orange) + end + + it 'allows insert_before' do + subject.insert_before(Orange, Banana) + expect(subject.handlers).to eq([Apple, Banana, Orange]) + end + + it 'allows insert_after' do + subject.insert_after(Apple, Banana) + expect(subject.handlers).to eq([Apple, Banana, Orange]) + end + + it 'allows to swap handlers' do + subject.swap(Apple, Banana) + expect(subject.handlers).to eq([Banana, Orange]) + end + + it 'allows to delete a handler' do + subject.delete(Apple) + expect(subject.handlers).to eq([Orange]) + end + end + + context 'when adapter is added with named options' do + after { Faraday.default_adapter_options = {} } + let(:conn) { Faraday::Connection.new {} } + + let(:cat_adapter) do + Class.new(Faraday::Adapter) do + attr_accessor :name + + def initialize(app, name:) + super(app) + @name = name + end + end + end + + let(:cat) { subject.adapter.build } + + it 'adds a handler to construct adapter with named options' do + Faraday.default_adapter = cat_adapter + Faraday.default_adapter_options = { name: 'Chloe' } + expect { cat }.to_not output( + /warning: Using the last argument as keyword parameters is deprecated/ + ).to_stderr + expect(cat.name).to eq 'Chloe' + end + end + + context 'when middleware is added with named arguments' do + let(:conn) { Faraday::Connection.new {} } + + let(:dog_middleware) do + Class.new(Faraday::Middleware) do + attr_accessor :name + + def initialize(app, name:) + super(app) + @name = name + end + end + end + let(:dog) do + subject.handlers.find { |handler| handler == dog_middleware }.build + end + + it 'adds a handler to construct middleware with options passed to use' do + subject.use dog_middleware, name: 'Rex' + expect { dog }.to_not output( + /warning: Using the last argument as keyword parameters is deprecated/ + ).to_stderr + expect(dog.name).to eq('Rex') + end + end + + context 'when a middleware is added with named arguments' do + let(:conn) { Faraday::Connection.new {} } + + let(:cat_request) do + Class.new(Faraday::Middleware) do + attr_accessor :name + + def initialize(app, name:) + super(app) + @name = name + end + end + end + let(:cat) do + subject.handlers.find { |handler| handler == cat_request }.build + end + + it 'adds a handler to construct request adapter with options passed to request' do + Faraday::Request.register_middleware cat_request: cat_request + subject.request :cat_request, name: 
'Felix' + expect { cat }.to_not output( + /warning: Using the last argument as keyword parameters is deprecated/ + ).to_stderr + expect(cat.name).to eq('Felix') + end + end + + context 'when a middleware is added with named arguments' do + let(:conn) { Faraday::Connection.new {} } + + let(:fish_response) do + Class.new(Faraday::Middleware) do + attr_accessor :name + + def initialize(app, name:) + super(app) + @name = name + end + end + end + let(:fish) do + subject.handlers.find { |handler| handler == fish_response }.build + end + + it 'adds a handler to construct response adapter with options passed to response' do + Faraday::Response.register_middleware fish_response: fish_response + subject.response :fish_response, name: 'Bubbles' + expect { fish }.to_not output( + /warning: Using the last argument as keyword parameters is deprecated/ + ).to_stderr + expect(fish.name).to eq('Bubbles') + end + end + + context 'when a plain adapter is added with named arguments' do + let(:conn) { Faraday::Connection.new {} } + + let(:rabbit_adapter) do + Class.new(Faraday::Adapter) do + attr_accessor :name + + def initialize(app, name:) + super(app) + @name = name + end + end + end + let(:rabbit) do + subject.adapter.build + end + + it 'adds a handler to construct adapter with options passed to adapter' do + Faraday::Adapter.register_middleware rabbit_adapter: rabbit_adapter + subject.adapter :rabbit_adapter, name: 'Thumper' + expect { rabbit }.to_not output( + /warning: Using the last argument as keyword parameters is deprecated/ + ).to_stderr + expect(rabbit.name).to eq('Thumper') + end + end + + context 'when handlers are directly added or updated' do + let(:conn) { Faraday::Connection.new {} } + + let(:rock_handler) do + Class.new do + attr_accessor :name + + def initialize(_app, name:) + @name = name + end + end + end + let(:rock) do + subject.handlers.find { |handler| handler == rock_handler }.build + end + + it 'adds a handler to construct adapter with options passed to insert' do + subject.insert 0, rock_handler, name: 'Stony' + expect { rock }.to_not output( + /warning: Using the last argument as keyword parameters is deprecated/ + ).to_stderr + expect(rock.name).to eq('Stony') + end + + it 'adds a handler with options passed to insert_after' do + subject.insert_after 0, rock_handler, name: 'Rocky' + expect { rock }.to_not output( + /warning: Using the last argument as keyword parameters is deprecated/ + ).to_stderr + expect(rock.name).to eq('Rocky') + end + + it 'adds a handler with options passed to swap' do + subject.insert 0, rock_handler, name: 'Flint' + subject.swap 0, rock_handler, name: 'Chert' + expect { rock }.to_not output( + /warning: Using the last argument as keyword parameters is deprecated/ + ).to_stderr + expect(rock.name).to eq('Chert') + end + end +end diff --git a/vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/spec/faraday/request/authorization_spec.rb b/vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/spec/faraday/request/authorization_spec.rb new file mode 100644 index 000000000..437c88ae0 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/spec/faraday/request/authorization_spec.rb @@ -0,0 +1,118 @@ +# frozen_string_literal: true + +RSpec.describe Faraday::Request::Authorization do + let(:conn) do + Faraday.new do |b| + b.request :authorization, auth_type, *auth_config + b.adapter :test do |stub| + stub.get('/auth-echo') do |env| + [200, {}, env[:request_headers]['Authorization']] + end + end + end + end + + shared_examples 'does not interfere with existing 
authentication' do + context 'and request already has an authentication header' do + let(:response) { conn.get('/auth-echo', nil, authorization: 'OAuth oauth_token') } + + it 'does not interfere with existing authorization' do + expect(response.body).to eq('OAuth oauth_token') + end + end + end + + let(:response) { conn.get('/auth-echo') } + + describe 'basic_auth' do + let(:auth_type) { :basic } + + context 'when passed correct params' do + let(:auth_config) { %w[aladdin opensesame] } + + it { expect(response.body).to eq('Basic YWxhZGRpbjpvcGVuc2VzYW1l') } + + include_examples 'does not interfere with existing authentication' + end + + context 'when passed very long values' do + let(:auth_config) { ['A' * 255, ''] } + + it { expect(response.body).to eq("Basic #{'QUFB' * 85}Og==") } + + include_examples 'does not interfere with existing authentication' + end + end + + describe 'authorization' do + let(:auth_type) { :Bearer } + + context 'when passed a string' do + let(:auth_config) { ['custom'] } + + it { expect(response.body).to eq('Bearer custom') } + + include_examples 'does not interfere with existing authentication' + end + + context 'when passed a proc' do + let(:auth_config) { [-> { 'custom_from_proc' }] } + + it { expect(response.body).to eq('Bearer custom_from_proc') } + + include_examples 'does not interfere with existing authentication' + end + + context 'when passed a callable' do + let(:callable) { double('Callable Authorizer', call: 'custom_from_callable') } + let(:auth_config) { [callable] } + + it { expect(response.body).to eq('Bearer custom_from_callable') } + + include_examples 'does not interfere with existing authentication' + end + + context 'with an argument' do + let(:response) { conn.get('/auth-echo', nil, 'middle' => 'crunchy surprise') } + + context 'when passed a proc' do + let(:auth_config) { [proc { |env| "proc #{env.request_headers['middle']}" }] } + + it { expect(response.body).to eq('Bearer proc crunchy surprise') } + + include_examples 'does not interfere with existing authentication' + end + + context 'when passed a lambda' do + let(:auth_config) { [->(env) { "lambda #{env.request_headers['middle']}" }] } + + it { expect(response.body).to eq('Bearer lambda crunchy surprise') } + + include_examples 'does not interfere with existing authentication' + end + + context 'when passed a callable with an argument' do + let(:callable) do + Class.new do + def call(env) + "callable #{env.request_headers['middle']}" + end + end.new + end + let(:auth_config) { [callable] } + + it { expect(response.body).to eq('Bearer callable crunchy surprise') } + + include_examples 'does not interfere with existing authentication' + end + end + + context 'when passed too many arguments' do + let(:auth_config) { %w[baz foo] } + + it { expect { response }.to raise_error(ArgumentError) } + + include_examples 'does not interfere with existing authentication' + end + end +end diff --git a/vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/spec/faraday/request/instrumentation_spec.rb b/vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/spec/faraday/request/instrumentation_spec.rb new file mode 100644 index 000000000..d207c5568 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/spec/faraday/request/instrumentation_spec.rb @@ -0,0 +1,74 @@ +# frozen_string_literal: true + +RSpec.describe Faraday::Request::Instrumentation do + class FakeInstrumenter + attr_reader :instrumentations + + def initialize + @instrumentations = [] + end + + def instrument(name, env) + @instrumentations << [name, 
env] + yield + end + end + + let(:config) { {} } + let(:options) { Faraday::Request::Instrumentation::Options.from config } + let(:instrumenter) { FakeInstrumenter.new } + let(:conn) do + Faraday.new do |f| + f.request :instrumentation, config.merge(instrumenter: instrumenter) + f.adapter :test do |stub| + stub.get '/' do + [200, {}, 'ok'] + end + end + end + end + + it { expect(options.name).to eq('request.faraday') } + it 'defaults to ActiveSupport::Notifications' do + res = options.instrumenter + rescue NameError => e + expect(e.to_s).to match('ActiveSupport') + else + expect(res).to eq(ActiveSupport::Notifications) + end + + it 'instruments with default name' do + expect(instrumenter.instrumentations.size).to eq(0) + + res = conn.get '/' + expect(res.body).to eq('ok') + expect(instrumenter.instrumentations.size).to eq(1) + + name, env = instrumenter.instrumentations.first + expect(name).to eq('request.faraday') + expect(env[:url].path).to eq('/') + end + + context 'with custom name' do + let(:config) { { name: 'custom' } } + + it { expect(options.name).to eq('custom') } + it 'instruments with custom name' do + expect(instrumenter.instrumentations.size).to eq(0) + + res = conn.get '/' + expect(res.body).to eq('ok') + expect(instrumenter.instrumentations.size).to eq(1) + + name, env = instrumenter.instrumentations.first + expect(name).to eq('custom') + expect(env[:url].path).to eq('/') + end + end + + context 'with custom instrumenter' do + let(:config) { { instrumenter: :custom } } + + it { expect(options.instrumenter).to eq(:custom) } + end +end diff --git a/vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/spec/faraday/request/json_spec.rb b/vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/spec/faraday/request/json_spec.rb new file mode 100644 index 000000000..44dee7963 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/spec/faraday/request/json_spec.rb @@ -0,0 +1,199 @@ +# frozen_string_literal: true + +RSpec.describe Faraday::Request::Json do + let(:middleware) { described_class.new(->(env) { Faraday::Response.new(env) }) } + + def process(body, content_type = nil) + env = { body: body, request_headers: Faraday::Utils::Headers.new } + env[:request_headers]['content-type'] = content_type if content_type + middleware.call(Faraday::Env.from(env)).env + end + + def result_body + result[:body] + end + + def result_type + result[:request_headers]['content-type'] + end + + context 'no body' do + let(:result) { process(nil) } + + it "doesn't change body" do + expect(result_body).to be_nil + end + + it "doesn't add content type" do + expect(result_type).to be_nil + end + end + + context 'empty body' do + let(:result) { process('') } + + it "doesn't change body" do + expect(result_body).to be_empty + end + + it "doesn't add content type" do + expect(result_type).to be_nil + end + end + + context 'string body' do + let(:result) { process('{"a":1}') } + + it "doesn't change body" do + expect(result_body).to eq('{"a":1}') + end + + it 'adds content type' do + expect(result_type).to eq('application/json') + end + end + + context 'object body' do + let(:result) { process(a: 1) } + + it 'encodes body' do + expect(result_body).to eq('{"a":1}') + end + + it 'adds content type' do + expect(result_type).to eq('application/json') + end + end + + context 'empty object body' do + let(:result) { process({}) } + + it 'encodes body' do + expect(result_body).to eq('{}') + end + end + + context 'true body' do + let(:result) { process(true) } + + it 'encodes body' do + expect(result_body).to eq('true') + 
end + + it 'adds content type' do + expect(result_type).to eq('application/json') + end + end + + context 'false body' do + let(:result) { process(false) } + + it 'encodes body' do + expect(result_body).to eq('false') + end + + it 'adds content type' do + expect(result_type).to eq('application/json') + end + end + + context 'object body with json type' do + let(:result) { process({ a: 1 }, 'application/json; charset=utf-8') } + + it 'encodes body' do + expect(result_body).to eq('{"a":1}') + end + + it "doesn't change content type" do + expect(result_type).to eq('application/json; charset=utf-8') + end + end + + context 'object body with vendor json type' do + let(:result) { process({ a: 1 }, 'application/vnd.myapp.v1+json; charset=utf-8') } + + it 'encodes body' do + expect(result_body).to eq('{"a":1}') + end + + it "doesn't change content type" do + expect(result_type).to eq('application/vnd.myapp.v1+json; charset=utf-8') + end + end + + context 'object body with incompatible type' do + let(:result) { process({ a: 1 }, 'application/xml; charset=utf-8') } + + it "doesn't change body" do + expect(result_body).to eq(a: 1) + end + + it "doesn't change content type" do + expect(result_type).to eq('application/xml; charset=utf-8') + end + end + + context 'with encoder' do + let(:encoder) do + double('Encoder').tap do |e| + allow(e).to receive(:dump) { |s, opts| JSON.generate(s, opts) } + end + end + + let(:result) { process(a: 1) } + + context 'when encoder is passed as object' do + let(:middleware) { described_class.new(->(env) { Faraday::Response.new(env) }, { encoder: encoder }) } + + it 'calls specified JSON encoder\'s dump method' do + expect(encoder).to receive(:dump).with({ a: 1 }) + + result + end + + it 'encodes body' do + expect(result_body).to eq('{"a":1}') + end + + it 'adds content type' do + expect(result_type).to eq('application/json') + end + end + + context 'when encoder is passed as an object-method pair' do + let(:middleware) { described_class.new(->(env) { Faraday::Response.new(env) }, { encoder: [encoder, :dump] }) } + + it 'calls specified JSON encoder' do + expect(encoder).to receive(:dump).with({ a: 1 }) + + result + end + + it 'encodes body' do + expect(result_body).to eq('{"a":1}') + end + + it 'adds content type' do + expect(result_type).to eq('application/json') + end + end + + context 'when encoder is not passed' do + let(:middleware) { described_class.new(->(env) { Faraday::Response.new(env) }) } + + it 'calls JSON.generate' do + expect(JSON).to receive(:generate).with({ a: 1 }) + + result + end + + it 'encodes body' do + expect(result_body).to eq('{"a":1}') + end + + it 'adds content type' do + expect(result_type).to eq('application/json') + end + end + end +end diff --git a/vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/spec/faraday/request/url_encoded_spec.rb b/vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/spec/faraday/request/url_encoded_spec.rb new file mode 100644 index 000000000..bdd9e0ac0 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/spec/faraday/request/url_encoded_spec.rb @@ -0,0 +1,93 @@ +# frozen_string_literal: true + +require 'stringio' + +RSpec.describe Faraday::Request::UrlEncoded do + let(:conn) do + Faraday.new do |b| + b.request :url_encoded + b.adapter :test do |stub| + stub.post('/echo') do |env| + posted_as = env[:request_headers]['Content-Type'] + body = env[:body] + if body.respond_to?(:read) + body = body.read + end + [200, { 'Content-Type' => posted_as }, body] + end + end + end + end + + it 'does nothing without payload' 
do + response = conn.post('/echo') + expect(response.headers['Content-Type']).to be_nil + expect(response.body.empty?).to be_truthy + end + + it 'ignores custom content type' do + response = conn.post('/echo', { some: 'data' }, 'content-type' => 'application/x-foo') + expect(response.headers['Content-Type']).to eq('application/x-foo') + expect(response.body).to eq(some: 'data') + end + + it 'works with no headers' do + response = conn.post('/echo', fruit: %w[apples oranges]) + expect(response.headers['Content-Type']).to eq('application/x-www-form-urlencoded') + expect(response.body).to eq('fruit%5B%5D=apples&fruit%5B%5D=oranges') + end + + it 'works with with headers' do + response = conn.post('/echo', { 'a' => 123 }, 'content-type' => 'application/x-www-form-urlencoded') + expect(response.headers['Content-Type']).to eq('application/x-www-form-urlencoded') + expect(response.body).to eq('a=123') + end + + it 'works with nested params' do + response = conn.post('/echo', user: { name: 'Mislav', web: 'mislav.net' }) + expect(response.headers['Content-Type']).to eq('application/x-www-form-urlencoded') + expected = { 'user' => { 'name' => 'Mislav', 'web' => 'mislav.net' } } + expect(Faraday::Utils.parse_nested_query(response.body)).to eq(expected) + end + + it 'works with non nested params' do + response = conn.post('/echo', dimensions: %w[date location]) do |req| + req.options.params_encoder = Faraday::FlatParamsEncoder + end + expect(response.headers['Content-Type']).to eq('application/x-www-form-urlencoded') + expected = { 'dimensions' => %w[date location] } + expect(Faraday::Utils.parse_query(response.body)).to eq(expected) + expect(response.body).to eq('dimensions=date&dimensions=location') + end + + it 'works with unicode' do + err = capture_warnings do + response = conn.post('/echo', str: 'eé cç aã aâ') + expect(response.body).to eq('str=e%C3%A9+c%C3%A7+a%C3%A3+a%C3%A2') + end + expect(err.empty?).to be_truthy + end + + it 'works with nested keys' do + response = conn.post('/echo', 'a' => { 'b' => { 'c' => ['d'] } }) + expect(response.body).to eq('a%5Bb%5D%5Bc%5D%5B%5D=d') + end + + it 'works with files' do + response = conn.post('/echo', StringIO.new('str=apple')) + expect(response.body).to eq('str=apple') + end + + context 'customising default_space_encoding' do + around do |example| + Faraday::Utils.default_space_encoding = '%20' + example.run + Faraday::Utils.default_space_encoding = nil + end + + it 'uses the custom character to encode spaces' do + response = conn.post('/echo', str: 'apple banana') + expect(response.body).to eq('str=apple%20banana') + end + end +end diff --git a/vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/spec/faraday/request_spec.rb b/vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/spec/faraday/request_spec.rb new file mode 100644 index 000000000..fbf85b56f --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/spec/faraday/request_spec.rb @@ -0,0 +1,110 @@ +# frozen_string_literal: true + +RSpec.describe Faraday::Request do + let(:conn) do + Faraday.new(url: 'http://httpbingo.org/api', + headers: { 'Mime-Version' => '1.0' }, + request: { oauth: { consumer_key: 'anonymous' } }) + end + let(:http_method) { :get } + let(:block) { nil } + + subject { conn.build_request(http_method, &block) } + + context 'when nothing particular is configured' do + it { expect(subject.http_method).to eq(:get) } + it { expect(subject.to_env(conn).ssl.verify).to be_falsey } + it { expect(subject.to_env(conn).ssl.verify_hostname).to be_falsey } + end + + context 'when HTTP method 
is post' do + let(:http_method) { :post } + + it { expect(subject.http_method).to eq(:post) } + end + + context 'when setting the url on setup with a URI' do + let(:block) { proc { |req| req.url URI.parse('foo.json?a=1') } } + + it { expect(subject.path).to eq(URI.parse('foo.json')) } + it { expect(subject.params).to eq('a' => '1') } + it { expect(subject.to_env(conn).url.to_s).to eq('http://httpbingo.org/api/foo.json?a=1') } + end + + context 'when setting the url on setup with a string path and params' do + let(:block) { proc { |req| req.url 'foo.json', 'a' => 1 } } + + it { expect(subject.path).to eq('foo.json') } + it { expect(subject.params).to eq('a' => 1) } + it { expect(subject.to_env(conn).url.to_s).to eq('http://httpbingo.org/api/foo.json?a=1') } + end + + context 'when setting the url on setup with a path including params' do + let(:block) { proc { |req| req.url 'foo.json?b=2&a=1#qqq' } } + + it { expect(subject.path).to eq('foo.json') } + it { expect(subject.params).to eq('a' => '1', 'b' => '2') } + it { expect(subject.to_env(conn).url.to_s).to eq('http://httpbingo.org/api/foo.json?a=1&b=2') } + end + + context 'when setting a header on setup with []= syntax' do + let(:block) { proc { |req| req['Server'] = 'Faraday' } } + let(:headers) { subject.to_env(conn).request_headers } + + it { expect(subject.headers['Server']).to eq('Faraday') } + it { expect(headers['mime-version']).to eq('1.0') } + it { expect(headers['server']).to eq('Faraday') } + end + + context 'when setting the body on setup' do + let(:block) { proc { |req| req.body = 'hi' } } + + it { expect(subject.body).to eq('hi') } + it { expect(subject.to_env(conn).body).to eq('hi') } + end + + context 'with global request options set' do + let(:env_request) { subject.to_env(conn).request } + + before do + conn.options.timeout = 3 + conn.options.open_timeout = 5 + conn.ssl.verify = false + conn.proxy = 'http://proxy.com' + end + + it { expect(subject.options.timeout).to eq(3) } + it { expect(subject.options.open_timeout).to eq(5) } + it { expect(env_request.timeout).to eq(3) } + it { expect(env_request.open_timeout).to eq(5) } + + context 'and per-request options set' do + let(:block) do + proc do |req| + req.options.timeout = 10 + req.options.boundary = 'boo' + req.options.oauth[:consumer_secret] = 'xyz' + req.options.context = { + foo: 'foo', + bar: 'bar' + } + end + end + + it { expect(subject.options.timeout).to eq(10) } + it { expect(subject.options.open_timeout).to eq(5) } + it { expect(env_request.timeout).to eq(10) } + it { expect(env_request.open_timeout).to eq(5) } + it { expect(env_request.boundary).to eq('boo') } + it { expect(env_request.context).to eq(foo: 'foo', bar: 'bar') } + it do + oauth_expected = { consumer_secret: 'xyz', consumer_key: 'anonymous' } + expect(env_request.oauth).to eq(oauth_expected) + end + end + end + + it 'supports marshal serialization' do + expect(Marshal.load(Marshal.dump(subject))).to eq(subject) + end +end diff --git a/vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/spec/faraday/response/json_spec.rb b/vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/spec/faraday/response/json_spec.rb new file mode 100644 index 000000000..e6cbda39b --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/spec/faraday/response/json_spec.rb @@ -0,0 +1,206 @@ +# frozen_string_literal: true + +RSpec.describe Faraday::Response::Json, type: :response do + let(:options) { {} } + let(:headers) { {} } + let(:middleware) do + described_class.new(lambda { |env| + Faraday::Response.new(env) + }, **options) + 
end + + def process(body, content_type = 'application/json', options = {}) + env = { + body: body, request: options, + request_headers: Faraday::Utils::Headers.new, + response_headers: Faraday::Utils::Headers.new(headers) + } + env[:response_headers]['content-type'] = content_type if content_type + yield(env) if block_given? + middleware.call(Faraday::Env.from(env)) + end + + context 'no type matching' do + it "doesn't change nil body" do + expect(process(nil).body).to be_nil + end + + it 'nullifies empty body' do + expect(process('').body).to be_nil + end + + it 'parses json body' do + response = process('{"a":1}') + expect(response.body).to eq('a' => 1) + expect(response.env[:raw_body]).to be_nil + end + end + + context 'with preserving raw' do + let(:options) { { preserve_raw: true } } + + it 'parses json body' do + response = process('{"a":1}') + expect(response.body).to eq('a' => 1) + expect(response.env[:raw_body]).to eq('{"a":1}') + end + end + + context 'with default regexp type matching' do + it 'parses json body of correct type' do + response = process('{"a":1}', 'application/x-json') + expect(response.body).to eq('a' => 1) + end + + it 'ignores json body of incorrect type' do + response = process('{"a":1}', 'text/json-xml') + expect(response.body).to eq('{"a":1}') + end + end + + context 'with array type matching' do + let(:options) { { content_type: %w[a/b c/d] } } + + it 'parses json body of correct type' do + expect(process('{"a":1}', 'a/b').body).to be_a(Hash) + expect(process('{"a":1}', 'c/d').body).to be_a(Hash) + end + + it 'ignores json body of incorrect type' do + expect(process('{"a":1}', 'a/d').body).not_to be_a(Hash) + end + end + + it 'chokes on invalid json' do + expect { process('{!') }.to raise_error(Faraday::ParsingError) + end + + it 'includes the response on the ParsingError instance' do + process('{') { |env| env[:response] = Faraday::Response.new } + raise 'Parsing should have failed.' 
+ rescue Faraday::ParsingError => e + expect(e.response).to be_a(Faraday::Response) + end + + context 'HEAD responses' do + it "nullifies the body if it's only one space" do + response = process(' ') + expect(response.body).to be_nil + end + + it "nullifies the body if it's two spaces" do + response = process(' ') + expect(response.body).to be_nil + end + end + + context 'JSON options' do + let(:body) { '{"a": 1}' } + let(:result) { { a: 1 } } + let(:options) do + { + parser_options: { + symbolize_names: true + } + } + end + + it 'passes relevant options to JSON parse' do + expect(::JSON).to receive(:parse) + .with(body, options[:parser_options]) + .and_return(result) + + response = process(body) + expect(response.body).to eq(result) + end + end + + context 'with decoder' do + let(:decoder) do + double('Decoder').tap do |e| + allow(e).to receive(:load) { |s, opts| JSON.parse(s, opts) } + end + end + + let(:body) { '{"a": 1}' } + let(:result) { { a: 1 } } + + context 'when decoder is passed as object' do + let(:options) do + { + parser_options: { + decoder: decoder, + option: :option_value, + symbolize_names: true + } + } + end + + it 'passes relevant options to specified decoder\'s load method' do + expect(decoder).to receive(:load) + .with(body, { option: :option_value, symbolize_names: true }) + .and_return(result) + + response = process(body) + expect(response.body).to eq(result) + end + end + + context 'when decoder is passed as an object-method pair' do + let(:options) do + { + parser_options: { + decoder: [decoder, :load], + option: :option_value, + symbolize_names: true + } + } + end + + it 'passes relevant options to specified decoder\'s method' do + expect(decoder).to receive(:load) + .with(body, { option: :option_value, symbolize_names: true }) + .and_return(result) + + response = process(body) + expect(response.body).to eq(result) + end + end + + context 'when decoder is not passed' do + let(:options) do + { + parser_options: { + symbolize_names: true + } + } + end + + it 'passes relevant options to JSON parse' do + expect(JSON).to receive(:parse) + .with(body, { symbolize_names: true }) + .and_return(result) + + response = process(body) + expect(response.body).to eq(result) + end + + it 'passes relevant options to JSON parse even when nil responds to :load' do + original_allow_message_expectations_on_nil = RSpec::Mocks.configuration.allow_message_expectations_on_nil + RSpec::Mocks.configuration.allow_message_expectations_on_nil = true + allow(nil).to receive(:respond_to?) 
+ .with(:load) + .and_return(true) + + expect(JSON).to receive(:parse) + .with(body, { symbolize_names: true }) + .and_return(result) + + response = process(body) + expect(response.body).to eq(result) + ensure + RSpec::Mocks.configuration.allow_message_expectations_on_nil = original_allow_message_expectations_on_nil + end + end + end +end diff --git a/vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/spec/faraday/response/logger_spec.rb b/vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/spec/faraday/response/logger_spec.rb new file mode 100644 index 000000000..e8e0bf3f9 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/spec/faraday/response/logger_spec.rb @@ -0,0 +1,293 @@ +# frozen_string_literal: true + +require 'stringio' +require 'logger' + +RSpec.describe Faraday::Response::Logger do + let(:string_io) { StringIO.new } + let(:logger) { Logger.new(string_io) } + let(:logger_options) { {} } + let(:conn) do + rubbles = ['Barney', 'Betty', 'Bam Bam'] + + Faraday.new do |b| + b.response :logger, logger, logger_options do |logger| + logger.filter(/(soylent green is) (.+)/, '\1 tasty') + logger.filter(/(api_key:).*"(.+)."/, '\1[API_KEY]') + logger.filter(/(password)=(.+)/, '\1=[HIDDEN]') + end + b.adapter :test do |stubs| + stubs.get('/hello') { [200, { 'Content-Type' => 'text/html' }, 'hello'] } + stubs.post('/ohai') { [200, { 'Content-Type' => 'text/html' }, 'fred'] } + stubs.post('/ohyes') { [200, { 'Content-Type' => 'text/html' }, 'pebbles'] } + stubs.get('/rubbles') { [200, { 'Content-Type' => 'application/json' }, rubbles] } + stubs.get('/filtered_body') { [200, { 'Content-Type' => 'text/html' }, 'soylent green is people'] } + stubs.get('/filtered_headers') { [200, { 'Content-Type' => 'text/html' }, 'headers response'] } + stubs.get('/filtered_params') { [200, { 'Content-Type' => 'text/html' }, 'params response'] } + stubs.get('/filtered_url') { [200, { 'Content-Type' => 'text/html' }, 'url response'] } + stubs.get('/connection_failed') { raise Faraday::ConnectionFailed, 'Failed to open TCP connection' } + end + end + end + + before do + logger.level = Logger::DEBUG + end + + it 'still returns output' do + resp = conn.get '/hello', nil, accept: 'text/html' + expect(resp.body).to eq('hello') + end + + context 'without configuration' do + let(:conn) do + Faraday.new do |b| + b.response :logger + b.adapter :test do |stubs| + stubs.get('/hello') { [200, { 'Content-Type' => 'text/html' }, 'hello'] } + end + end + end + + it 'defaults to stdout' do + expect(Logger).to receive(:new).with($stdout).and_return(Logger.new(nil)) + conn.get('/hello') + end + end + + context 'when logger with program name' do + let(:logger) { Logger.new(string_io, progname: 'my_best_program') } + + it 'logs with program name' do + conn.get '/hello' + + expect(string_io.string).to match('-- my_best_program: request:') + expect(string_io.string).to match('-- my_best_program: response:') + end + end + + context 'when logger without program name' do + it 'logs without program name' do + conn.get '/hello' + + expect(string_io.string).to match('-- : request:') + expect(string_io.string).to match('-- : response:') + end + end + + context 'with default formatter' do + let(:formatter) { instance_double(Faraday::Logging::Formatter, request: true, response: true, filter: []) } + + before { allow(Faraday::Logging::Formatter).to receive(:new).and_return(formatter) } + + it 'delegates logging to the formatter' do + expect(formatter).to receive(:request).with(an_instance_of(Faraday::Env)) + expect(formatter).to 
receive(:response).with(an_instance_of(Faraday::Env)) + conn.get '/hello' + end + + context 'when no route' do + it 'delegates logging to the formatter' do + expect(formatter).to receive(:request).with(an_instance_of(Faraday::Env)) + expect(formatter).to receive(:exception).with(an_instance_of(Faraday::Adapter::Test::Stubs::NotFound)) + + expect { conn.get '/noroute' }.to raise_error(Faraday::Adapter::Test::Stubs::NotFound) + end + end + end + + context 'with custom formatter' do + let(:formatter_class) do + Class.new(Faraday::Logging::Formatter) do + def request(_env) + info 'Custom log formatter request' + end + + def response(_env) + info 'Custom log formatter response' + end + end + end + + let(:logger_options) { { formatter: formatter_class } } + + it 'logs with custom formatter' do + conn.get '/hello' + + expect(string_io.string).to match('Custom log formatter request') + expect(string_io.string).to match('Custom log formatter response') + end + end + + it 'logs method and url' do + conn.get '/hello', nil, accept: 'text/html' + expect(string_io.string).to match('GET http:/hello') + end + + it 'logs status' do + conn.get '/hello', nil, accept: 'text/html' + expect(string_io.string).to match('Status 200') + end + + it 'does not log error message by default' do + expect { conn.get '/noroute' }.to raise_error(Faraday::Adapter::Test::Stubs::NotFound) + expect(string_io.string).not_to match(%(no stubbed request for get http:/noroute)) + end + + it 'logs request headers by default' do + conn.get '/hello', nil, accept: 'text/html' + expect(string_io.string).to match(%(Accept: "text/html)) + end + + it 'logs response headers by default' do + conn.get '/hello', nil, accept: 'text/html' + expect(string_io.string).to match(%(Content-Type: "text/html)) + end + + it 'does not log request body by default' do + conn.post '/ohai', 'name=Unagi', accept: 'text/html' + expect(string_io.string).not_to match(%(name=Unagi)) + end + + it 'does not log response body by default' do + conn.post '/ohai', 'name=Toro', accept: 'text/html' + expect(string_io.string).not_to match(%(fred)) + end + + it 'logs filter headers' do + conn.headers = { 'api_key' => 'ABC123' } + conn.get '/filtered_headers', nil, accept: 'text/html' + expect(string_io.string).to match(%(api_key:)) + expect(string_io.string).to match(%([API_KEY])) + expect(string_io.string).not_to match(%(ABC123)) + end + + it 'logs filter url' do + conn.get '/filtered_url?password=hunter2', nil, accept: 'text/html' + expect(string_io.string).to match(%([HIDDEN])) + expect(string_io.string).not_to match(%(hunter2)) + end + + context 'when not logging request headers' do + let(:logger_options) { { headers: { request: false } } } + + it 'does not log request headers if option is false' do + conn.get '/hello', nil, accept: 'text/html' + expect(string_io.string).not_to match(%(Accept: "text/html)) + end + end + + context 'when not logging response headers' do + let(:logger_options) { { headers: { response: false } } } + + it 'does not log response headers if option is false' do + conn.get '/hello', nil, accept: 'text/html' + expect(string_io.string).not_to match(%(Content-Type: "text/html)) + end + end + + context 'when logging request body' do + let(:logger_options) { { bodies: { request: true } } } + + it 'logs only request body' do + conn.post '/ohyes', 'name=Tamago', accept: 'text/html' + expect(string_io.string).to match(%(name=Tamago)) + expect(string_io.string).not_to match(%(pebbles)) + end + end + + context 'when logging response body' do + 
let(:logger_options) { { bodies: { response: true } } } + + it 'logs only response body' do + conn.post '/ohyes', 'name=Hamachi', accept: 'text/html' + expect(string_io.string).to match(%(pebbles)) + expect(string_io.string).not_to match(%(name=Hamachi)) + end + end + + context 'when logging request and response bodies' do + let(:logger_options) { { bodies: true } } + + it 'logs request and response body' do + conn.post '/ohyes', 'name=Ebi', accept: 'text/html' + expect(string_io.string).to match(%(name=Ebi)) + expect(string_io.string).to match(%(pebbles)) + end + + it 'logs response body object' do + conn.get '/rubbles', nil, accept: 'text/html' + expect(string_io.string).to match(%([\"Barney\", \"Betty\", \"Bam Bam\"]\n)) + end + + it 'logs filter body' do + conn.get '/filtered_body', nil, accept: 'text/html' + expect(string_io.string).to match(%(soylent green is)) + expect(string_io.string).to match(%(tasty)) + expect(string_io.string).not_to match(%(people)) + end + end + + context 'when bodies are logged by default' do + before do + described_class.default_options = { bodies: true } + end + + it 'logs response body' do + conn.post '/ohai' + expect(string_io.string).to match(%(fred)) + end + + after do + described_class.default_options = { bodies: false } + end + end + + context 'when logging errors' do + let(:logger_options) { { errors: true } } + + it 'logs error message' do + expect { conn.get '/noroute' }.to raise_error(Faraday::Adapter::Test::Stubs::NotFound) + expect(string_io.string).to match(%(no stubbed request for get http:/noroute)) + end + end + + context 'when logging headers and errors' do + let(:logger_options) { { headers: true, errors: true } } + + it 'logs error message' do + expect { conn.get '/connection_failed' }.to raise_error(Faraday::ConnectionFailed) + expect(string_io.string).to match(%(Failed to open TCP connection)) + end + end + + context 'when using log_level' do + let(:logger_options) { { bodies: true, log_level: :debug } } + + it 'logs request/request body on the specified level (debug)' do + logger.level = Logger::DEBUG + conn.post '/ohyes', 'name=Ebi', accept: 'text/html' + expect(string_io.string).to match(%(name=Ebi)) + expect(string_io.string).to match(%(pebbles)) + end + + it 'logs headers on the debug level' do + logger.level = Logger::DEBUG + conn.get '/hello', nil, accept: 'text/html' + expect(string_io.string).to match(%(Content-Type: "text/html)) + end + + it 'does not log request/response body on the info level' do + logger.level = Logger::INFO + conn.post '/ohyes', 'name=Ebi', accept: 'text/html' + expect(string_io.string).not_to match(%(name=Ebi)) + expect(string_io.string).not_to match(%(pebbles)) + end + + it 'does not log headers on the info level' do + logger.level = Logger::INFO + conn.get '/hello', nil, accept: 'text/html' + expect(string_io.string).not_to match(%(Content-Type: "text/html)) + end + end +end diff --git a/vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/spec/faraday/response/raise_error_spec.rb b/vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/spec/faraday/response/raise_error_spec.rb new file mode 100644 index 000000000..18c2044a3 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/spec/faraday/response/raise_error_spec.rb @@ -0,0 +1,275 @@ +# frozen_string_literal: true + +RSpec.describe Faraday::Response::RaiseError do + let(:conn) do + Faraday.new do |b| + b.response :raise_error + b.adapter :test do |stub| + stub.get('ok') { [200, { 'Content-Type' => 'text/html' }, ''] } + stub.get('bad-request') { [400, { 
'X-Reason' => 'because' }, 'keep looking'] } + stub.get('unauthorized') { [401, { 'X-Reason' => 'because' }, 'keep looking'] } + stub.get('forbidden') { [403, { 'X-Reason' => 'because' }, 'keep looking'] } + stub.get('not-found') { [404, { 'X-Reason' => 'because' }, 'keep looking'] } + stub.get('proxy-error') { [407, { 'X-Reason' => 'because' }, 'keep looking'] } + stub.get('request-timeout') { [408, { 'X-Reason' => 'because' }, 'keep looking'] } + stub.get('conflict') { [409, { 'X-Reason' => 'because' }, 'keep looking'] } + stub.get('unprocessable-entity') { [422, { 'X-Reason' => 'because' }, 'keep looking'] } + stub.get('too-many-requests') { [429, { 'X-Reason' => 'because' }, 'keep looking'] } + stub.get('4xx') { [499, { 'X-Reason' => 'because' }, 'keep looking'] } + stub.get('nil-status') { [nil, { 'X-Reason' => 'nil' }, 'fail'] } + stub.get('server-error') { [500, { 'X-Error' => 'bailout' }, 'fail'] } + end + end + end + + it 'raises no exception for 200 responses' do + expect { conn.get('ok') }.not_to raise_error + end + + it 'raises Faraday::BadRequestError for 400 responses' do + expect { conn.get('bad-request') }.to raise_error(Faraday::BadRequestError) do |ex| + expect(ex.message).to eq('the server responded with status 400 for GET http:/bad-request') + expect(ex.response[:headers]['X-Reason']).to eq('because') + expect(ex.response[:status]).to eq(400) + expect(ex.response_status).to eq(400) + expect(ex.response_body).to eq('keep looking') + expect(ex.response_headers['X-Reason']).to eq('because') + end + end + + it 'raises Faraday::UnauthorizedError for 401 responses' do + expect { conn.get('unauthorized') }.to raise_error(Faraday::UnauthorizedError) do |ex| + expect(ex.message).to eq('the server responded with status 401 for GET http:/unauthorized') + expect(ex.response[:headers]['X-Reason']).to eq('because') + expect(ex.response[:status]).to eq(401) + expect(ex.response_status).to eq(401) + expect(ex.response_body).to eq('keep looking') + expect(ex.response_headers['X-Reason']).to eq('because') + end + end + + it 'raises Faraday::ForbiddenError for 403 responses' do + expect { conn.get('forbidden') }.to raise_error(Faraday::ForbiddenError) do |ex| + expect(ex.message).to eq('the server responded with status 403 for GET http:/forbidden') + expect(ex.response[:headers]['X-Reason']).to eq('because') + expect(ex.response[:status]).to eq(403) + expect(ex.response_status).to eq(403) + expect(ex.response_body).to eq('keep looking') + expect(ex.response_headers['X-Reason']).to eq('because') + end + end + + it 'raises Faraday::ResourceNotFound for 404 responses' do + expect { conn.get('not-found') }.to raise_error(Faraday::ResourceNotFound) do |ex| + expect(ex.message).to eq('the server responded with status 404 for GET http:/not-found') + expect(ex.response[:headers]['X-Reason']).to eq('because') + expect(ex.response[:status]).to eq(404) + expect(ex.response_status).to eq(404) + expect(ex.response_body).to eq('keep looking') + expect(ex.response_headers['X-Reason']).to eq('because') + end + end + + it 'raises Faraday::ProxyAuthError for 407 responses' do + expect { conn.get('proxy-error') }.to raise_error(Faraday::ProxyAuthError) do |ex| + expect(ex.message).to eq('407 "Proxy Authentication Required"') + expect(ex.response[:headers]['X-Reason']).to eq('because') + expect(ex.response[:status]).to eq(407) + expect(ex.response_status).to eq(407) + expect(ex.response_body).to eq('keep looking') + expect(ex.response_headers['X-Reason']).to eq('because') + end + end + + it 'raises 
Faraday::RequestTimeoutError for 408 responses' do + expect { conn.get('request-timeout') }.to raise_error(Faraday::RequestTimeoutError) do |ex| + expect(ex.message).to eq('the server responded with status 408 for GET http:/request-timeout') + expect(ex.response[:headers]['X-Reason']).to eq('because') + expect(ex.response[:status]).to eq(408) + expect(ex.response_status).to eq(408) + expect(ex.response_body).to eq('keep looking') + expect(ex.response_headers['X-Reason']).to eq('because') + end + end + + it 'raises Faraday::ConflictError for 409 responses' do + expect { conn.get('conflict') }.to raise_error(Faraday::ConflictError) do |ex| + expect(ex.message).to eq('the server responded with status 409 for GET http:/conflict') + expect(ex.response[:headers]['X-Reason']).to eq('because') + expect(ex.response[:status]).to eq(409) + expect(ex.response_status).to eq(409) + expect(ex.response_body).to eq('keep looking') + expect(ex.response_headers['X-Reason']).to eq('because') + end + end + + it 'raises Faraday::UnprocessableEntityError for 422 responses' do + expect { conn.get('unprocessable-entity') }.to raise_error(Faraday::UnprocessableEntityError) do |ex| + expect(ex.message).to eq('the server responded with status 422 for GET http:/unprocessable-entity') + expect(ex.response[:headers]['X-Reason']).to eq('because') + expect(ex.response[:status]).to eq(422) + expect(ex.response_status).to eq(422) + expect(ex.response_body).to eq('keep looking') + expect(ex.response_headers['X-Reason']).to eq('because') + end + end + + it 'raises Faraday::TooManyRequestsError for 429 responses' do + expect { conn.get('too-many-requests') }.to raise_error(Faraday::TooManyRequestsError) do |ex| + expect(ex.message).to eq('the server responded with status 429 for GET http:/too-many-requests') + expect(ex.response[:headers]['X-Reason']).to eq('because') + expect(ex.response[:status]).to eq(429) + expect(ex.response_status).to eq(429) + expect(ex.response_body).to eq('keep looking') + expect(ex.response_headers['X-Reason']).to eq('because') + end + end + + it 'raises Faraday::NilStatusError for nil status in response' do + expect { conn.get('nil-status') }.to raise_error(Faraday::NilStatusError) do |ex| + expect(ex.message).to eq('http status could not be derived from the server response') + expect(ex.response[:headers]['X-Reason']).to eq('nil') + expect(ex.response[:status]).to be_nil + expect(ex.response_status).to be_nil + expect(ex.response_body).to eq('fail') + expect(ex.response_headers['X-Reason']).to eq('nil') + end + end + + it 'raises Faraday::ClientError for other 4xx responses' do + expect { conn.get('4xx') }.to raise_error(Faraday::ClientError) do |ex| + expect(ex.message).to eq('the server responded with status 499 for GET http:/4xx') + expect(ex.response[:headers]['X-Reason']).to eq('because') + expect(ex.response[:status]).to eq(499) + expect(ex.response_status).to eq(499) + expect(ex.response_body).to eq('keep looking') + expect(ex.response_headers['X-Reason']).to eq('because') + end + end + + it 'raises Faraday::ServerError for 500 responses' do + expect { conn.get('server-error') }.to raise_error(Faraday::ServerError) do |ex| + expect(ex.message).to eq('the server responded with status 500 for GET http:/server-error') + expect(ex.response[:headers]['X-Error']).to eq('bailout') + expect(ex.response[:status]).to eq(500) + expect(ex.response_status).to eq(500) + expect(ex.response_body).to eq('fail') + expect(ex.response_headers['X-Error']).to eq('bailout') + end + end + + describe 'request info' 
do + let(:conn) do + Faraday.new do |b| + b.response :raise_error, **middleware_options + b.adapter :test do |stub| + stub.post(url, request_body, request_headers) do + [400, { 'X-Reason' => 'because' }, 'keep looking'] + end + end + end + end + let(:middleware_options) { {} } + let(:request_body) { JSON.generate({ 'item' => 'sth' }) } + let(:request_headers) { { 'Authorization' => 'Basic 123' } } + let(:url_path) { 'request' } + let(:query_params) { 'full=true' } + let(:url) { "#{url_path}?#{query_params}" } + + subject(:perform_request) do + conn.post url do |req| + req.headers['Authorization'] = 'Basic 123' + req.body = request_body + end + end + + it 'returns the request info in the exception' do + expect { perform_request }.to raise_error(Faraday::BadRequestError) do |ex| + expect(ex.response[:request][:method]).to eq(:post) + expect(ex.response[:request][:url]).to eq(URI("http:/#{url}")) + expect(ex.response[:request][:url_path]).to eq("/#{url_path}") + expect(ex.response[:request][:params]).to eq({ 'full' => 'true' }) + expect(ex.response[:request][:headers]).to match(a_hash_including(request_headers)) + expect(ex.response[:request][:body]).to eq(request_body) + end + end + + describe 'DEFAULT_OPTION: include_request' do + before(:each) do + Faraday::Response::RaiseError.instance_variable_set(:@default_options, nil) + Faraday::Middleware.instance_variable_set(:@default_options, nil) + end + + after(:all) do + Faraday::Response::RaiseError.instance_variable_set(:@default_options, nil) + Faraday::Middleware.instance_variable_set(:@default_options, nil) + end + + context 'when RaiseError DEFAULT_OPTION (include_request: true) is used' do + it 'includes request info in the exception' do + expect { perform_request }.to raise_error(Faraday::BadRequestError) do |ex| + expect(ex.response.keys).to contain_exactly( + :status, + :headers, + :body, + :request + ) + end + end + end + + context 'when application sets default_options `include_request: false`' do + before(:each) do + Faraday::Response::RaiseError.default_options = { include_request: false } + end + + context 'and when include_request option is omitted' do + it 'does not include request info in the exception' do + expect { perform_request }.to raise_error(Faraday::BadRequestError) do |ex| + expect(ex.response.keys).to contain_exactly( + :status, + :headers, + :body + ) + end + end + end + + context 'and when include_request option is explicitly set for instance' do + let(:middleware_options) { { include_request: true } } + + it 'includes request info in the exception' do + expect { perform_request }.to raise_error(Faraday::BadRequestError) do |ex| + expect(ex.response.keys).to contain_exactly( + :status, + :headers, + :body, + :request + ) + end + end + end + end + end + end + + describe 'allowing certain status codes' do + let(:conn) do + Faraday.new do |b| + b.response :raise_error, allowed_statuses: [404] + b.adapter :test do |stub| + stub.get('bad-request') { [400, { 'X-Reason' => 'because' }, 'keep looking'] } + stub.get('not-found') { [404, { 'X-Reason' => 'because' }, 'keep looking'] } + end + end + end + + it 'raises an error for status codes that are not explicitly allowed' do + expect { conn.get('bad-request') }.to raise_error(Faraday::BadRequestError) + end + + it 'does not raise an error for allowed status codes' do + expect { conn.get('not-found') }.not_to raise_error + end + end +end diff --git a/vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/spec/faraday/response_spec.rb 
b/vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/spec/faraday/response_spec.rb new file mode 100644 index 000000000..e3e2c2378 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/spec/faraday/response_spec.rb @@ -0,0 +1,77 @@ +# frozen_string_literal: true + +RSpec.describe Faraday::Response do + subject { Faraday::Response.new(env) } + + let(:env) do + Faraday::Env.from(status: 404, body: 'yikes', url: Faraday::Utils.URI('https://lostisland.github.io/faraday'), + response_headers: { 'Content-Type' => 'text/plain' }) + end + + it { expect(subject.finished?).to be_truthy } + it { expect { subject.finish({}) }.to raise_error(RuntimeError) } + it { expect(subject.success?).to be_falsey } + it { expect(subject.status).to eq(404) } + it { expect(subject.body).to eq('yikes') } + it { expect(subject.headers['Content-Type']).to eq('text/plain') } + it { expect(subject['content-type']).to eq('text/plain') } + + describe '#apply_request' do + before { subject.apply_request(body: 'a=b', method: :post) } + + it { expect(subject.body).to eq('yikes') } + it { expect(subject.env[:method]).to eq(:post) } + end + + describe '#to_hash' do + let(:hash) { subject.to_hash } + + it { expect(hash).to be_a(Hash) } + it { expect(hash[:status]).to eq(subject.status) } + it { expect(hash[:response_headers]).to eq(subject.headers) } + it { expect(hash[:body]).to eq(subject.body) } + it { expect(hash[:url]).to eq(subject.env.url) } + end + + describe 'marshal serialization support' do + subject { Faraday::Response.new } + let(:loaded) { Marshal.load(Marshal.dump(subject)) } + + before do + subject.on_complete {} + subject.finish(env.merge(params: 'moo')) + end + + it { expect(loaded.env[:params]).to be_nil } + it { expect(loaded.env[:body]).to eq(env[:body]) } + it { expect(loaded.env[:response_headers]).to eq(env[:response_headers]) } + it { expect(loaded.env[:status]).to eq(env[:status]) } + it { expect(loaded.env[:url]).to eq(env[:url]) } + end + + describe '#on_complete' do + subject { Faraday::Response.new } + + it 'parse body on finish' do + subject.on_complete { |env| env[:body] = env[:body].upcase } + subject.finish(env) + + expect(subject.body).to eq('YIKES') + end + + it 'can access response body in on_complete callback' do + subject.on_complete { |env| env[:body] = subject.body.upcase } + subject.finish(env) + + expect(subject.body).to eq('YIKES') + end + + it 'can access response body in on_complete callback' do + callback_env = nil + subject.on_complete { |env| callback_env = env } + subject.finish({}) + + expect(subject.env).to eq(callback_env) + end + end +end diff --git a/vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/spec/faraday/utils/headers_spec.rb b/vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/spec/faraday/utils/headers_spec.rb new file mode 100644 index 000000000..238bfd990 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/spec/faraday/utils/headers_spec.rb @@ -0,0 +1,109 @@ +# frozen_string_literal: true + +RSpec.describe Faraday::Utils::Headers do + subject { Faraday::Utils::Headers.new } + + context 'when Content-Type is set to application/json' do + before { subject['Content-Type'] = 'application/json' } + + it { expect(subject.keys).to eq(['Content-Type']) } + it { expect(subject['Content-Type']).to eq('application/json') } + it { expect(subject['CONTENT-TYPE']).to eq('application/json') } + it { expect(subject['content-type']).to eq('application/json') } + it { is_expected.to include('content-type') } + end + + context 'when Content-Type is set to application/xml' do 
+ before { subject['Content-Type'] = 'application/xml' } + + it { expect(subject.keys).to eq(['Content-Type']) } + it { expect(subject['Content-Type']).to eq('application/xml') } + it { expect(subject['CONTENT-TYPE']).to eq('application/xml') } + it { expect(subject['content-type']).to eq('application/xml') } + it { is_expected.to include('content-type') } + end + + describe '#fetch' do + before { subject['Content-Type'] = 'application/json' } + + it { expect(subject.fetch('Content-Type')).to eq('application/json') } + it { expect(subject.fetch('CONTENT-TYPE')).to eq('application/json') } + it { expect(subject.fetch(:content_type)).to eq('application/json') } + it { expect(subject.fetch('invalid', 'default')).to eq('default') } + it { expect(subject.fetch('invalid', false)).to eq(false) } + it { expect(subject.fetch('invalid', nil)).to be_nil } + it { expect(subject.fetch('Invalid') { |key| "#{key} key" }).to eq('Invalid key') } + it 'calls a block when provided' do + block_called = false + expect(subject.fetch('content-type') { block_called = true }).to eq('application/json') + expect(block_called).to be_falsey + end + it 'raises an error if key not found' do + expected_error = defined?(KeyError) ? KeyError : IndexError + expect { subject.fetch('invalid') }.to raise_error(expected_error) + end + end + + describe '#delete' do + before do + subject['Content-Type'] = 'application/json' + @deleted = subject.delete('content-type') + end + + it { expect(@deleted).to eq('application/json') } + it { expect(subject.size).to eq(0) } + it { is_expected.not_to include('content-type') } + it { expect(subject.delete('content-type')).to be_nil } + end + + describe '#dig' do + before { subject['Content-Type'] = 'application/json' } + + it { expect(subject&.dig('Content-Type')).to eq('application/json') } + it { expect(subject&.dig('CONTENT-TYPE')).to eq('application/json') } + it { expect(subject&.dig(:content_type)).to eq('application/json') } + it { expect(subject&.dig('invalid')).to be_nil } + end + + describe '#parse' do + context 'when response headers leave http status line out' do + let(:headers) { "HTTP/1.1 200 OK\r\nContent-Type: text/html\r\n\r\n" } + + before { subject.parse(headers) } + + it { expect(subject.keys).to eq(%w[Content-Type]) } + it { expect(subject['Content-Type']).to eq('text/html') } + it { expect(subject['content-type']).to eq('text/html') } + end + + context 'when response headers values include a colon' do + let(:headers) { "HTTP/1.1 200 OK\r\nContent-Type: text/html\r\nLocation: http://httpbingo.org/\r\n\r\n" } + + before { subject.parse(headers) } + + it { expect(subject['location']).to eq('http://httpbingo.org/') } + end + + context 'when response headers include a blank line' do + let(:headers) { "HTTP/1.1 200 OK\r\n\r\nContent-Type: text/html\r\n\r\n" } + + before { subject.parse(headers) } + + it { expect(subject['content-type']).to eq('text/html') } + end + + context 'when response headers include already stored keys' do + let(:headers) { "HTTP/1.1 200 OK\r\nX-Numbers: 123\r\n\r\n" } + + before do + h = subject + h[:x_numbers] = 8 + h.parse(headers) + end + + it do + expect(subject[:x_numbers]).to eq('8, 123') + end + end + end +end diff --git a/vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/spec/faraday/utils_spec.rb b/vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/spec/faraday/utils_spec.rb new file mode 100644 index 000000000..24269db70 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/spec/faraday/utils_spec.rb @@ -0,0 +1,120 @@ +# 
frozen_string_literal: true + +RSpec.describe Faraday::Utils do + describe 'headers parsing' do + let(:multi_response_headers) do + "HTTP/1.x 500 OK\r\nContent-Type: text/html; charset=UTF-8\r\n" \ + "HTTP/1.x 200 OK\r\nContent-Type: application/json; charset=UTF-8\r\n\r\n" + end + + it 'parse headers for aggregated responses' do + headers = Faraday::Utils::Headers.new + headers.parse(multi_response_headers) + + result = headers.to_hash + + expect(result['Content-Type']).to eq('application/json; charset=UTF-8') + end + end + + describe 'URI parsing' do + let(:url) { 'http://example.com/abc' } + + it 'escapes safe buffer' do + str = FakeSafeBuffer.new('$32,000.00') + expect(Faraday::Utils.escape(str)).to eq('%2432%2C000.00') + end + + it 'parses with default parser' do + with_default_uri_parser(nil) do + uri = normalize(url) + expect(uri.host).to eq('example.com') + end + end + + it 'parses with URI' do + with_default_uri_parser(::URI) do + uri = normalize(url) + expect(uri.host).to eq('example.com') + end + end + + it 'parses with block' do + with_default_uri_parser(->(u) { "booya#{'!' * u.size}" }) do + expect(normalize(url)).to eq('booya!!!!!!!!!!!!!!!!!!!!!!') + end + end + + it 'replaces headers hash' do + headers = Faraday::Utils::Headers.new('authorization' => 't0ps3cr3t!') + expect(headers).to have_key('authorization') + + headers.replace('content-type' => 'text/plain') + expect(headers).not_to have_key('authorization') + end + end + + describe '.deep_merge!' do + let(:connection_options) { Faraday::ConnectionOptions.new } + let(:url) do + { + url: 'http://example.com/abc', + headers: { 'Mime-Version' => '1.0' }, + request: { oauth: { consumer_key: 'anonymous' } }, + ssl: { version: '2' } + } + end + + it 'recursively merges the headers' do + connection_options.headers = { user_agent: 'My Agent 1.0' } + deep_merge = Faraday::Utils.deep_merge!(connection_options, url) + + expect(deep_merge.headers).to eq('Mime-Version' => '1.0', user_agent: 'My Agent 1.0') + end + + context 'when a target hash has an Options Struct value' do + let(:request) do + { + params_encoder: nil, + proxy: nil, + bind: nil, + timeout: nil, + open_timeout: nil, + read_timeout: nil, + write_timeout: nil, + boundary: nil, + oauth: { consumer_key: 'anonymous' }, + context: nil, + on_data: nil + } + end + let(:ssl) do + { + verify: nil, + ca_file: nil, + ca_path: nil, + verify_mode: nil, + cert_store: nil, + client_cert: nil, + client_key: nil, + certificate: nil, + private_key: nil, + verify_depth: nil, + version: '2', + min_version: nil, + max_version: nil, + verify_hostname: nil, + hostname: nil, + ciphers: nil + } + end + + it 'does not overwrite an Options Struct value' do + deep_merge = Faraday::Utils.deep_merge!(connection_options, url) + + expect(deep_merge.request.to_h).to eq(request) + expect(deep_merge.ssl.to_h).to eq(ssl) + end + end + end +end diff --git a/vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/spec/faraday_spec.rb b/vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/spec/faraday_spec.rb new file mode 100644 index 000000000..c3583f184 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/spec/faraday_spec.rb @@ -0,0 +1,43 @@ +# frozen_string_literal: true + +RSpec.describe Faraday do + it 'has a version number' do + expect(Faraday::VERSION).not_to be nil + end + + context 'proxies to default_connection' do + let(:mock_connection) { double('Connection') } + before do + Faraday.default_connection = mock_connection + end + + it 'proxies methods that exist on the default_connection' do + 
expect(mock_connection).to receive(:this_should_be_proxied) + + Faraday.this_should_be_proxied + end + + it 'uses method_missing on Faraday if there is no proxyable method' do + expected_message = + if RUBY_VERSION >= '3.4' + "undefined method 'this_method_does_not_exist' for module Faraday" + elsif RUBY_VERSION >= '3.3' + "undefined method `this_method_does_not_exist' for module Faraday" + else + "undefined method `this_method_does_not_exist' for Faraday:Module" + end + + expect { Faraday.this_method_does_not_exist }.to raise_error(NoMethodError, expected_message) + end + + it 'proxied methods can be accessed' do + allow(mock_connection).to receive(:this_should_be_proxied) + + expect(Faraday.method(:this_should_be_proxied)).to be_a(Method) + end + + after do + Faraday.default_connection = nil + end + end +end diff --git a/vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/spec/spec_helper.rb b/vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/spec/spec_helper.rb new file mode 100644 index 000000000..1b80ea248 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/spec/spec_helper.rb @@ -0,0 +1,133 @@ +# frozen_string_literal: true + +# This file was generated by the `rspec --init` command. Conventionally, all +# specs live under a `spec` directory, which RSpec adds to the `$LOAD_PATH`. +# The generated `.rspec` file contains `--require spec_helper` which will cause +# this file to always be loaded, without a need to explicitly require it in any +# files. +# +# Given that it is always loaded, you are encouraged to keep this file as +# light-weight as possible. Requiring heavyweight dependencies from this file +# will add to the boot time of your test suite on EVERY test run, even for an +# individual file that may not need all of that loaded. Instead, consider making +# a separate helper file that requires the additional dependencies and performs +# the additional setup, and require it from the spec files that actually need +# it. +# +# See http://rubydoc.info/gems/rspec-core/RSpec/Core/Configuration + +require 'simplecov' +require 'coveralls' +require 'webmock/rspec' +WebMock.disable_net_connect!(allow_localhost: true) + +SimpleCov.formatters = [SimpleCov::Formatter::HTMLFormatter, Coveralls::SimpleCov::Formatter] + +SimpleCov.start do + add_filter '/spec/' + minimum_coverage 84 + minimum_coverage_by_file 26 +end + +require 'faraday' +require 'pry' + +# Ensure all /lib files are loaded +# so they will be included in the test coverage report. +Dir['./lib/**/*.rb'].each { |file| require file } + +# Load all Rspec support files +Dir['./spec/support/**/*.rb'].each { |file| require file } + +RSpec.configure do |config| + # rspec-expectations config goes here. You can use an alternate + # assertion/expectation library such as wrong or the stdlib/minitest + # assertions if you prefer. + config.expect_with :rspec do |expectations| + # This option will default to `true` in RSpec 4. It makes the `description` + # and `failure_message` of custom matchers include text for helper methods + # defined using `chain`, e.g.: + # be_bigger_than(2).and_smaller_than(4).description + # # => "be bigger than 2 and smaller than 4" + # ...rather than: + # # => "be bigger than 2" + expectations.include_chain_clauses_in_custom_matcher_descriptions = true + end + + # rspec-mocks config goes here. You can use an alternate test double + # library (such as bogus or mocha) by changing the `mock_with` option here. 
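The `faraday_spec.rb` examples above rely on the Faraday module forwarding unknown methods to `Faraday.default_connection`. A minimal sketch of that behaviour from the caller's side (the host is a placeholder and the middleware/adapter choices are illustrative, not taken from the spec):

```ruby
require 'faraday'

# Verb methods called on the Faraday module are proxied to the default connection.
Faraday.default_connection = Faraday.new('https://example.com') do |f|
  f.response :raise_error
  f.adapter :net_http
end

# Equivalent to Faraday.default_connection.get('/hello')
response = Faraday.get('/hello')
response.status # => 200 on success
```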
+ config.mock_with :rspec do |mocks| + # Prevents you from mocking or stubbing a method that does not exist on + # a real object. This is generally recommended, and will default to + # `true` in RSpec 4. + mocks.verify_partial_doubles = true + end + + # This option will default to `:apply_to_host_groups` in RSpec 4 (and will + # have no way to turn it off -- the option exists only for backwards + # compatibility in RSpec 3). It causes shared context metadata to be + # inherited by the metadata hash of host groups and examples, rather than + # triggering implicit auto-inclusion in groups with matching metadata. + config.shared_context_metadata_behavior = :apply_to_host_groups + + # This allows you to limit a spec run to individual examples or groups + # you care about by tagging them with `:focus` metadata. When nothing + # is tagged with `:focus`, all examples get run. RSpec also provides + # aliases for `it`, `describe`, and `context` that include `:focus` + # metadata: `fit`, `fdescribe` and `fcontext`, respectively. + # config.filter_run_when_matching :focus + + # Allows RSpec to persist some state between runs in order to support + # the `--only-failures` and `--next-failure` CLI options. We recommend + # you configure your source control system to ignore this file. + # config.example_status_persistence_file_path = "spec/examples.txt" + + # Limits the available syntax to the non-monkey patched syntax that is + # recommended. For more details, see: + # - http://rspec.info/blog/2012/06/rspecs-new-expectation-syntax/ + # - http://www.teaisaweso.me/blog/2013/05/27/rspecs-new-message-expectation-syntax/ + # - http://rspec.info/blog/2014/05/notable-changes-in-rspec-3/#zero-monkey-patching-mode + # config.disable_monkey_patching! + + # This setting enables warnings. It's recommended, but in some cases may + # be too noisy due to issues in dependencies. + # config.warnings = true + + # Many RSpec users commonly either run the entire suite or an individual + # file, and it's useful to allow more verbose output when running an + # individual spec file. + # if config.files_to_run.one? + # # Use the documentation formatter for detailed output, + # # unless a formatter has already been configured + # # (e.g. via a command-line flag). + # config.default_formatter = "doc" + # end + + # Print the 10 slowest examples and example groups at the + # end of the spec run, to help surface which specs are running + # particularly slow. + # config.profile_examples = 10 + + # Run specs in random order to surface order dependencies. If you find an + # order dependency and want to debug it, you can fix the order by providing + # the seed, which is printed after each run. + # --seed 1234 + config.order = :random + + # Seed global randomization in this process using the `--seed` CLI option. + # Setting this allows you to use `--seed` to deterministically reproduce + # test failures related to randomization by passing the same `--seed` value + # as the one that triggered the failure. + Kernel.srand config.seed + + config.include Faraday::HelperMethods +end + +# Extends RSpec DocumentationFormatter to hide skipped tests. 
+module FormatterOverrides + def example_pending(_arg); end + + def dump_pending(_arg); end + + RSpec::Core::Formatters::DocumentationFormatter.prepend self +end diff --git a/vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/spec/support/disabling_stub.rb b/vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/spec/support/disabling_stub.rb new file mode 100644 index 000000000..3df2f21b6 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/spec/support/disabling_stub.rb @@ -0,0 +1,14 @@ +# frozen_string_literal: true + +# Allows to disable WebMock stubs +module DisablingStub + def disable + @disabled = true + end + + def disabled? + @disabled + end + + WebMock::RequestStub.prepend self +end diff --git a/vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/spec/support/fake_safe_buffer.rb b/vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/spec/support/fake_safe_buffer.rb new file mode 100644 index 000000000..69afd6ea9 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/spec/support/fake_safe_buffer.rb @@ -0,0 +1,15 @@ +# frozen_string_literal: true + +# emulates ActiveSupport::SafeBuffer#gsub +FakeSafeBuffer = Struct.new(:string) do + def to_s + self + end + + def gsub(regex) + string.gsub(regex) do + match, = Regexp.last_match(0), '' =~ /a/ # rubocop:disable Performance/StringInclude + yield(match) + end + end +end diff --git a/vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/spec/support/faraday_middleware_subclasses.rb b/vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/spec/support/faraday_middleware_subclasses.rb new file mode 100644 index 000000000..4e63f61a1 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/spec/support/faraday_middleware_subclasses.rb @@ -0,0 +1,18 @@ +# frozen_string_literal: true + +module FaradayMiddlewareSubclasses + class SubclassNoOptions < Faraday::Middleware + end + + class SubclassOneOption < Faraday::Middleware + DEFAULT_OPTIONS = { some_other_option: false }.freeze + end + + class SubclassTwoOptions < Faraday::Middleware + DEFAULT_OPTIONS = { some_option: true, some_other_option: false }.freeze + end +end + +Faraday::Response.register_middleware(no_options: FaradayMiddlewareSubclasses::SubclassNoOptions) +Faraday::Response.register_middleware(one_option: FaradayMiddlewareSubclasses::SubclassOneOption) +Faraday::Response.register_middleware(two_options: FaradayMiddlewareSubclasses::SubclassTwoOptions) diff --git a/vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/spec/support/helper_methods.rb b/vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/spec/support/helper_methods.rb new file mode 100644 index 000000000..0f5d4f5a5 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/spec/support/helper_methods.rb @@ -0,0 +1,96 @@ +# frozen_string_literal: true + +module Faraday + module HelperMethods + def self.included(base) + base.extend ClassMethods + end + + module ClassMethods + def features(*features) + @features = features + end + + def on_feature(name) + yield if block_given? && feature?(name) + end + + def feature?(name) + if @features.nil? + superclass.feature?(name) if superclass.respond_to?(:feature?) + elsif @features.include?(name) + true + end + end + + def method_with_body?(method) + METHODS_WITH_BODY.include?(method.to_s) + end + end + + def ssl_mode? 
+ ENV['SSL'] == 'yes' + end + + def normalize(url) + Faraday::Utils::URI(url) + end + + def with_default_uri_parser(parser) + old_parser = Faraday::Utils.default_uri_parser + begin + Faraday::Utils.default_uri_parser = parser + yield + ensure + Faraday::Utils.default_uri_parser = old_parser + end + end + + def with_env(new_env) + old_env = {} + + new_env.each do |key, value| + old_env[key] = ENV.fetch(key, false) + ENV[key] = value + end + + begin + yield + ensure + old_env.each do |key, value| + value == false ? ENV.delete(key) : ENV[key] = value + end + end + end + + def with_env_proxy_disabled + Faraday.ignore_env_proxy = true + + begin + yield + ensure + Faraday.ignore_env_proxy = false + end + end + + def capture_warnings + old = $stderr + $stderr = StringIO.new + begin + yield + $stderr.string + ensure + $stderr = old + end + end + + def method_with_body?(method) + self.class.method_with_body?(method) + end + + def big_string + kb = 1024 + (32..126).map(&:chr).cycle.take(50 * kb).join + end + end +end diff --git a/vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/spec/support/shared_examples/adapter.rb b/vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/spec/support/shared_examples/adapter.rb new file mode 100644 index 000000000..625690883 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/spec/support/shared_examples/adapter.rb @@ -0,0 +1,105 @@ +# frozen_string_literal: true + +shared_examples 'an adapter' do |**options| + before { skip } if options[:skip] + + context 'with SSL enabled' do + before { ENV['SSL'] = 'yes' } + include_examples 'adapter examples', options + end + + context 'with SSL disabled' do + before { ENV['SSL'] = 'no' } + include_examples 'adapter examples', options + end +end + +shared_examples 'adapter examples' do |**options| + include Faraday::StreamingResponseChecker + + let(:adapter) { described_class.name.split('::').last } + + let(:conn_options) { { headers: { 'X-Faraday-Adapter' => adapter } }.merge(options[:conn_options] || {}) } + + let(:adapter_options) do + return [] unless options[:adapter_options] + + if options[:adapter_options].is_a?(Array) + options[:adapter_options] + else + [options[:adapter_options]] + end + end + + let(:protocol) { ssl_mode? ? 'https' : 'http' } + let(:remote) { "#{protocol}://example.com" } + let(:stub_remote) { remote } + + let(:conn) do + conn_options[:ssl] ||= {} + conn_options[:ssl][:ca_file] ||= ENV.fetch('SSL_FILE', nil) + conn_options[:ssl][:verify_hostname] ||= ENV['SSL_VERIFY_HOSTNAME'] == 'yes' + + Faraday.new(remote, conn_options) do |conn| + conn.request :url_encoded + conn.response :raise_error + conn.adapter described_class, *adapter_options + end + end + + let!(:request_stub) { stub_request(http_method, stub_remote) } + + after do + expect(request_stub).to have_been_requested unless request_stub.disabled? 
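The `'an adapter'` / `'adapter examples'` shared groups above are what downstream adapter gems include to run the full Faraday conformance suite. A hypothetical spec file sketching how they are wired together with the `features` helper from `helper_methods.rb` (the feature list here is illustrative, not the real list for any particular adapter):

```ruby
# frozen_string_literal: true

RSpec.describe Faraday::Adapter::NetHttp do
  # Declare which optional capabilities this adapter supports; the shared
  # examples consult these via on_feature/feature?.
  features :reason_phrase_parse, :compression, :streaming

  # Pulls in the SSL on/off contexts and every request-method example.
  it_behaves_like 'an adapter'
end
```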
+ end + + describe '#delete' do + let(:http_method) { :delete } + + it_behaves_like 'a request method', :delete + end + + describe '#get' do + let(:http_method) { :get } + + it_behaves_like 'a request method', :get + end + + describe '#head' do + let(:http_method) { :head } + + it_behaves_like 'a request method', :head + end + + describe '#options' do + let(:http_method) { :options } + + it_behaves_like 'a request method', :options + end + + describe '#patch' do + let(:http_method) { :patch } + + it_behaves_like 'a request method', :patch + end + + describe '#post' do + let(:http_method) { :post } + + it_behaves_like 'a request method', :post + end + + describe '#put' do + let(:http_method) { :put } + + it_behaves_like 'a request method', :put + end + + on_feature :trace_method do + describe '#trace' do + let(:http_method) { :trace } + + it_behaves_like 'a request method', :trace + end + end +end diff --git a/vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/spec/support/shared_examples/params_encoder.rb b/vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/spec/support/shared_examples/params_encoder.rb new file mode 100644 index 000000000..38c856799 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/spec/support/shared_examples/params_encoder.rb @@ -0,0 +1,18 @@ +# frozen_string_literal: true + +shared_examples 'a params encoder' do + it 'escapes safe buffer' do + monies = FakeSafeBuffer.new('$32,000.00') + expect(subject.encode('a' => monies)).to eq('a=%2432%2C000.00') + end + + it 'raises type error for empty string' do + expect { subject.encode('') }.to raise_error(TypeError) do |error| + expect(error.message).to eq("Can't convert String into Hash.") + end + end + + it 'encodes nil' do + expect(subject.encode('a' => nil)).to eq('a') + end +end diff --git a/vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/spec/support/shared_examples/request_method.rb b/vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/spec/support/shared_examples/request_method.rb new file mode 100644 index 000000000..afa337677 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/spec/support/shared_examples/request_method.rb @@ -0,0 +1,263 @@ +# frozen_string_literal: true + +shared_examples 'proxy examples' do + it 'handles requests with proxy' do + res = conn.public_send(http_method, '/') + + expect(res.status).to eq(200) + end + + it 'handles proxy failures' do + request_stub.to_return(status: 407) + + expect { conn.public_send(http_method, '/') }.to raise_error(Faraday::ProxyAuthError) + end +end + +shared_examples 'a request method' do |http_method| + let(:query_or_body) { method_with_body?(http_method) ? 
:body : :query } + let(:response) { conn.public_send(http_method, '/') } + + unless http_method == :head && feature?(:skip_response_body_on_head) + it 'retrieves the response body' do + res_body = 'test' + request_stub.to_return(body: res_body) + expect(conn.public_send(http_method, '/').body).to eq(res_body) + end + end + + it 'handles headers with multiple values' do + request_stub.to_return(headers: { 'Set-Cookie' => 'name=value' }) + expect(response.headers['set-cookie']).to eq('name=value') + end + + it 'retrieves the response headers' do + request_stub.to_return(headers: { 'Content-Type' => 'text/plain' }) + expect(response.headers['Content-Type']).to match(%r{text/plain}) + expect(response.headers['content-type']).to match(%r{text/plain}) + end + + it 'sends user agent' do + request_stub.with(headers: { 'User-Agent' => 'Agent Faraday' }) + conn.public_send(http_method, '/', nil, user_agent: 'Agent Faraday') + end + + it 'represents empty body response as blank string' do + expect(response.body).to eq('') + end + + it 'handles connection error' do + request_stub.disable + expect { conn.public_send(http_method, 'http://localhost:4') }.to raise_error(Faraday::ConnectionFailed) + end + + on_feature :local_socket_binding do + it 'binds local socket' do + stub_request(http_method, 'http://example.com') + + host = '1.2.3.4' + port = 1234 + conn_options[:request] = { bind: { host: host, port: port } } + + conn.public_send(http_method, '/') + + expect(conn.options[:bind][:host]).to eq(host) + expect(conn.options[:bind][:port]).to eq(port) + end + end + + # context 'when wrong ssl certificate is provided' do + # let(:ca_file_path) { 'tmp/faraday-different-ca-cert.crt' } + # before { conn_options.merge!(ssl: { ca_file: ca_file_path }) } + # + # it do + # expect { conn.public_send(http_method, '/') }.to raise_error(Faraday::SSLError) # do |ex| + # expect(ex.message).to include?('certificate') + # end + # end + # end + + on_feature :request_body_on_query_methods do + it 'sends request body' do + request_stub.with({ body: 'test' }) + res = if query_or_body == :body + conn.public_send(http_method, '/', 'test') + else + conn.public_send(http_method, '/') do |req| + req.body = 'test' + end + end + expect(res.env.request_body).to eq('test') + end + end + + it 'sends url encoded parameters' do + payload = { name: 'zack' } + request_stub.with({ query_or_body => payload }) + res = conn.public_send(http_method, '/', payload) + if query_or_body == :query + expect(res.env.request_body).to be_nil + else + expect(res.env.request_body).to eq('name=zack') + end + end + + it 'sends url encoded nested parameters' do + payload = { name: { first: 'zack' } } + request_stub.with({ query_or_body => payload }) + conn.public_send(http_method, '/', payload) + end + + # TODO: This needs reimplementation: see https://github.com/lostisland/faraday/issues/718 + # Should raise Faraday::TimeoutError + it 'supports timeout option' do + conn_options[:request] = { timeout: 1 } + request_stub.to_timeout + exc = adapter == 'NetHttp' ? Faraday::ConnectionFailed : Faraday::TimeoutError + expect { conn.public_send(http_method, '/') }.to raise_error(exc) + end + + # TODO: This needs reimplementation: see https://github.com/lostisland/faraday/issues/718 + # Should raise Faraday::ConnectionFailed + it 'supports open_timeout option' do + conn_options[:request] = { open_timeout: 1 } + request_stub.to_timeout + exc = adapter == 'NetHttp' ? 
Faraday::ConnectionFailed : Faraday::TimeoutError + expect { conn.public_send(http_method, '/') }.to raise_error(exc) + end + + on_feature :reason_phrase_parse do + it 'parses the reason phrase' do + request_stub.to_return(status: [200, 'OK']) + expect(response.reason_phrase).to eq('OK') + end + end + + on_feature :compression do + # Accept-Encoding header not sent for HEAD requests as body is not expected in the response. + unless http_method == :head + it 'handles gzip compression' do + request_stub.with(headers: { 'Accept-Encoding' => /\bgzip\b/ }) + conn.public_send(http_method, '/') + end + + it 'handles deflate compression' do + request_stub.with(headers: { 'Accept-Encoding' => /\bdeflate\b/ }) + conn.public_send(http_method, '/') + end + end + end + + on_feature :streaming do + describe 'streaming' do + let(:streamed) { [] } + + context 'when response is empty' do + it 'handles streaming' do + env = nil + conn.public_send(http_method, '/') do |req| + req.options.on_data = proc do |chunk, size, block_env| + streamed << [chunk, size] + env ||= block_env + end + end + + expect(streamed).to eq([['', 0]]) + # TODO: enable this after updating all existing adapters to the new streaming API + # expect(env).to be_a(Faraday::Env) + # expect(env.status).to eq(200) + end + end + + context 'when response contains big data' do + before { request_stub.to_return(body: big_string) } + + it 'handles streaming' do + env = nil + response = conn.public_send(http_method, '/') do |req| + req.options.on_data = proc do |chunk, size, block_env| + streamed << [chunk, size] + env ||= block_env + end + end + + expect(response.body).to eq('') + check_streaming_response(streamed, chunk_size: 16 * 1024) + # TODO: enable this after updating all existing adapters to the new streaming API + # expect(env).to be_a(Faraday::Env) + # expect(env.status).to eq(200) + end + end + end + end + + on_feature :parallel do + context 'with parallel setup' do + before do + @resp1 = nil + @resp2 = nil + @payload1 = { a: '1' } + @payload2 = { b: '2' } + + request_stub + .with({ query_or_body => @payload1 }) + .to_return(body: @payload1.to_json) + + stub_request(http_method, remote) + .with({ query_or_body => @payload2 }) + .to_return(body: @payload2.to_json) + + conn.in_parallel do + @resp1 = conn.public_send(http_method, '/', @payload1) + @resp2 = conn.public_send(http_method, '/', @payload2) + + expect(conn.in_parallel?).to be_truthy + expect(@resp1.body).to be_nil + expect(@resp2.body).to be_nil + end + + expect(conn.in_parallel?).to be_falsey + end + + it 'handles parallel requests status' do + expect(@resp1&.status).to eq(200) + expect(@resp2&.status).to eq(200) + end + + unless http_method == :head && feature?(:skip_response_body_on_head) + it 'handles parallel requests body' do + expect(@resp1&.body).to eq(@payload1.to_json) + expect(@resp2&.body).to eq(@payload2.to_json) + end + end + end + end + + context 'when a proxy is provided as option' do + before do + conn_options[:proxy] = 'http://env-proxy.com:80' + end + + include_examples 'proxy examples' + end + + context 'when http_proxy env variable is set' do + let(:proxy_url) { 'http://env-proxy.com:80' } + + around do |example| + with_env 'http_proxy' => proxy_url do + example.run + end + end + + include_examples 'proxy examples' + + context 'when the env proxy is ignored' do + around do |example| + with_env_proxy_disabled(&example) + end + + include_examples 'proxy examples' + end + end +end diff --git 
a/vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/spec/support/streaming_response_checker.rb b/vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/spec/support/streaming_response_checker.rb new file mode 100644 index 000000000..8ef259995 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/faraday-2.13.4/spec/support/streaming_response_checker.rb @@ -0,0 +1,35 @@ +# frozen_string_literal: true + +module Faraday + module StreamingResponseChecker + def check_streaming_response(streamed, options = {}) + opts = { + prefix: '', + streaming?: true + }.merge(options) + + expected_response = opts[:prefix] + big_string + + chunks, sizes = streamed.transpose + + # Check that the total size of the chunks (via the last size returned) + # is the same size as the expected_response + expect(sizes.last).to eq(expected_response.bytesize) + + start_index = 0 + expected_chunks = [] + chunks.each do |actual_chunk| + expected_chunk = expected_response[start_index..((start_index + actual_chunk.bytesize) - 1)] + expected_chunks << expected_chunk + start_index += expected_chunk.bytesize + end + + # it's easier to read a smaller portion, so we check that first + expect(expected_chunks[0][0..255]).to eq(chunks[0][0..255]) + + [expected_chunks, chunks].transpose.each do |expected, actual| + expect(actual).to eq(expected) + end + end + end +end diff --git a/vendor/bundle/ruby/3.2.0/gems/faraday-net_http-3.4.1/LICENSE.md b/vendor/bundle/ruby/3.2.0/gems/faraday-net_http-3.4.1/LICENSE.md new file mode 100644 index 000000000..b7aabc564 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/faraday-net_http-3.4.1/LICENSE.md @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2020 Jan van der Pas + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/vendor/bundle/ruby/3.2.0/gems/faraday-net_http-3.4.1/README.md b/vendor/bundle/ruby/3.2.0/gems/faraday-net_http-3.4.1/README.md new file mode 100644 index 000000000..6d510f4b5 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/faraday-net_http-3.4.1/README.md @@ -0,0 +1,57 @@ +# Faraday Net::HTTP adapter + +This gem is a [Faraday][faraday] adapter for the [Net::HTTP][net-http] library. Faraday is an HTTP client library that provides a common interface over many adapters. Every adapter is defined into it's own gem. This gem defines the adapter for `Net::HTTP` the HTTP library that's included into the standard library of Ruby. 
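The streaming checker above validates the chunks delivered through Faraday's `on_data` callback, which the `'a request method'` shared examples exercise. A minimal sketch of that API from application code (host and path are placeholders; the chunk handling is illustrative):

```ruby
require 'faraday'

conn = Faraday.new('https://example.com')

received = +''
conn.get('/large-file') do |req|
  # Each chunk arrives together with the cumulative number of bytes received;
  # when streaming, the final response body is typically left empty.
  req.options.on_data = proc do |chunk, overall_received_bytes|
    received << chunk
  end
end
```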
+ +## Installation + +Add this line to your application's Gemfile: + +```ruby +gem 'faraday-net_http' +``` + +And then execute: + + $ bundle install + +Or install it yourself as: + + $ gem install faraday-net_http + +## Usage + +```ruby +conn = Faraday.new(...) do |f| + f.adapter :net_http do |http| + # yields Net::HTTP + http.verify_callback = lambda do |preverify, cert_store| + # do something here... + end + end +end +``` + +## Development + +After checking out the repo, run `bin/setup` to install dependencies. Then, run `bin/test` to run the tests. You can also run `bin/console` for an interactive prompt that will allow you to experiment. + +To release a new version, update the version number in `version.rb`, and then run `bundle exec rake release`, which will create a git tag for the version, push git commits and tags, and push the `.gem` file to [rubygems.org](rubygems). + +## Contributing + +Bug reports and pull requests are welcome on [GitHub][repo]. + +## License + +The gem is available as open source under the terms of the [license][license]. + +## Code of Conduct + +Everyone interacting in the Faraday Net::HTTP adapter project's codebases, issue trackers, chat rooms and mailing lists is expected to follow the [code of conduct][code-of-conduct]. + +[faraday]: https://github.com/lostisland/faraday +[net-http]: https://ruby-doc.org/stdlib-2.7.0/libdoc/net/http/rdoc/Net/HTTP.html +[rubygems]: https://rubygems.org +[repo]: https://github.com/lostisland/faraday-net_http +[license]: https://github.com/lostisland/faraday-net_http/blob/main/LICENSE.md +[code-of-conduct]: https://github.com/lostisland/faraday-net_http/blob/main/CODE_OF_CONDUCT.md diff --git a/vendor/bundle/ruby/3.2.0/gems/faraday-net_http-3.4.1/lib/faraday/adapter/net_http.rb b/vendor/bundle/ruby/3.2.0/gems/faraday-net_http-3.4.1/lib/faraday/adapter/net_http.rb new file mode 100644 index 000000000..871bd8668 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/faraday-net_http-3.4.1/lib/faraday/adapter/net_http.rb @@ -0,0 +1,206 @@ +# frozen_string_literal: true + +begin + require 'net/https' +rescue LoadError + warn 'Warning: no such file to load -- net/https. ' \ + 'Make sure openssl is installed if you want ssl support' + require 'net/http' +end +require 'zlib' + +module Faraday + class Adapter + class NetHttp < Faraday::Adapter + exceptions = [ + IOError, + Errno::EADDRNOTAVAIL, + Errno::EALREADY, + Errno::ECONNABORTED, + Errno::ECONNREFUSED, + Errno::ECONNRESET, + Errno::EHOSTUNREACH, + Errno::EINVAL, + Errno::ENETUNREACH, + Errno::EPIPE, + Net::HTTPBadResponse, + Net::HTTPHeaderSyntaxError, + Net::ProtocolError, + SocketError, + Zlib::GzipFile::Error + ] + + exceptions << ::OpenSSL::SSL::SSLError if defined?(::OpenSSL::SSL::SSLError) + exceptions << ::Net::OpenTimeout if defined?(::Net::OpenTimeout) + + NET_HTTP_EXCEPTIONS = exceptions.freeze + + def initialize(app = nil, opts = {}, &block) + @ssl_cert_store = nil + super(app, opts, &block) + end + + def build_connection(env) + net_http_connection(env).tap do |http| + configure_ssl(http, env[:ssl]) if env[:url].scheme == 'https' && env[:ssl] + configure_request(http, env[:request]) + end + end + + def net_http_connection(env) + proxy = env[:request][:proxy] + port = env[:url].port || (env[:url].scheme == 'https' ? 
443 : 80) + if proxy + Net::HTTP.new(env[:url].hostname, port, + proxy[:uri].hostname, proxy[:uri].port, + proxy[:user], proxy[:password], + nil, proxy[:uri].scheme == 'https') + else + Net::HTTP.new(env[:url].hostname, port, nil) + end + end + + def call(env) + super + connection(env) do |http| + perform_request(http, env) + rescue *NET_HTTP_EXCEPTIONS => e + raise Faraday::SSLError, e if defined?(OpenSSL) && e.is_a?(OpenSSL::SSL::SSLError) + + raise Faraday::ConnectionFailed, e + end + @app.call env + rescue Timeout::Error, Errno::ETIMEDOUT => e + raise Faraday::TimeoutError, e + end + + private + + def create_request(env) + request = Net::HTTPGenericRequest.new \ + env[:method].to_s.upcase, # request method + !!env[:body], # is there request body + env[:method] != :head, # is there response body + env[:url].request_uri, # request uri path + env[:request_headers] # request headers + + if env[:body].respond_to?(:read) + request.body_stream = env[:body] + else + request.body = env[:body] + end + request + end + + def perform_request(http, env) + if env.stream_response? + http_response = env.stream_response do |&on_data| + request_with_wrapped_block(http, env, &on_data) + end + http_response.body = nil + else + http_response = request_with_wrapped_block(http, env) + end + env.response_body = encoded_body(http_response) + env.response.finish(env) + http_response + end + + def request_with_wrapped_block(http, env, &block) + # Must use Net::HTTP#start and pass it a block otherwise the server's + # TCP socket does not close correctly. + http.start do |opened_http| + opened_http.request create_request(env) do |response| + save_http_response(env, response) + + response.read_body(&block) if block_given? + end + end + end + + def save_http_response(env, http_response) + save_response( + env, http_response.code.to_i, nil, nil, http_response.message, finished: false + ) do |response_headers| + http_response.each_header do |key, value| + response_headers[key] = value + end + end + end + + def configure_ssl(http, ssl) + http.use_ssl = true if http.respond_to?(:use_ssl=) + + http.verify_mode = ssl_verify_mode(ssl) + http.cert_store = ssl_cert_store(ssl) + + cert, *extra_chain_cert = ssl[:client_cert] + http.cert = cert if cert + http.extra_chain_cert = extra_chain_cert if extra_chain_cert.any? + + http.key = ssl[:client_key] if ssl[:client_key] + http.ca_file = ssl[:ca_file] if ssl[:ca_file] + http.ca_path = ssl[:ca_path] if ssl[:ca_path] + http.verify_depth = ssl[:verify_depth] if ssl[:verify_depth] + http.ssl_version = ssl[:version] if ssl[:version] + http.min_version = ssl[:min_version] if ssl[:min_version] + http.max_version = ssl[:max_version] if ssl[:max_version] + http.verify_hostname = ssl[:verify_hostname] if verify_hostname_enabled?(http, ssl) + http.ciphers = ssl[:ciphers] if ssl[:ciphers] + end + + def configure_request(http, req) + if (sec = request_timeout(:read, req)) + http.read_timeout = sec + end + + if (sec = http.respond_to?(:write_timeout=) && + request_timeout(:write, req)) + http.write_timeout = sec + end + + if (sec = request_timeout(:open, req)) + http.open_timeout = sec + end + + # Only set if Net::Http supports it, since Ruby 2.5. + http.max_retries = 0 if http.respond_to?(:max_retries=) + + @config_block&.call(http) + end + + def ssl_cert_store(ssl) + return ssl[:cert_store] if ssl[:cert_store] + + # Use the default cert store by default, i.e. 
system ca certs + @ssl_cert_store ||= OpenSSL::X509::Store.new.tap(&:set_default_paths) + end + + def ssl_verify_mode(ssl) + ssl[:verify_mode] || begin + if ssl.fetch(:verify, true) + OpenSSL::SSL::VERIFY_PEER + else + OpenSSL::SSL::VERIFY_NONE + end + end + end + + def encoded_body(http_response) + body = http_response.body || +'' + /\bcharset=([^;]+)/.match(http_response['Content-Type']) do |match| + content_charset = ::Encoding.find(match[1].strip) + body = body.dup if body.frozen? + body.force_encoding(content_charset) + rescue ArgumentError + nil + end + body + end + + def verify_hostname_enabled?(http, ssl) + http.respond_to?(:verify_hostname=) && ssl.key?(:verify_hostname) + end + end + end +end diff --git a/vendor/bundle/ruby/3.2.0/gems/faraday-net_http-3.4.1/lib/faraday/net_http.rb b/vendor/bundle/ruby/3.2.0/gems/faraday-net_http-3.4.1/lib/faraday/net_http.rb new file mode 100644 index 000000000..e4048e89a --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/faraday-net_http-3.4.1/lib/faraday/net_http.rb @@ -0,0 +1,10 @@ +# frozen_string_literal: true + +require 'faraday/adapter/net_http' +require 'faraday/net_http/version' + +module Faraday + module NetHttp + Faraday::Adapter.register_middleware(net_http: Faraday::Adapter::NetHttp) + end +end diff --git a/vendor/bundle/ruby/3.2.0/gems/faraday-net_http-3.4.1/lib/faraday/net_http/version.rb b/vendor/bundle/ruby/3.2.0/gems/faraday-net_http-3.4.1/lib/faraday/net_http/version.rb new file mode 100644 index 000000000..813fef185 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/faraday-net_http-3.4.1/lib/faraday/net_http/version.rb @@ -0,0 +1,7 @@ +# frozen_string_literal: true + +module Faraday + module NetHttp + VERSION = '3.4.1' + end +end diff --git a/vendor/bundle/ruby/3.2.0/gems/json-2.13.2/BSDL b/vendor/bundle/ruby/3.2.0/gems/json-2.13.2/BSDL new file mode 100644 index 000000000..66d93598a --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/json-2.13.2/BSDL @@ -0,0 +1,22 @@ +Copyright (C) 1993-2013 Yukihiro Matsumoto. All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions +are met: +1. Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. +2. Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + +THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE +ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS +OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) +HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY +OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF +SUCH DAMAGE. 
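`configure_ssl` and `configure_request` above are driven entirely by the `ssl:` and `request:` options on the Faraday connection. A minimal sketch of how those options are supplied from the caller's side (host and CA path are placeholders):

```ruby
require 'faraday'

conn = Faraday.new(
  'https://example.com',
  # Mapped onto Net::HTTP open_timeout / read_timeout by configure_request.
  request: { open_timeout: 2, timeout: 5 },
  # Mapped onto verify_mode, cert_store/ca_file, etc. by configure_ssl.
  ssl: { verify: true, ca_file: '/path/to/ca-bundle.pem' }
) do |f|
  f.adapter :net_http
end

conn.get('/')
```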
diff --git a/vendor/bundle/ruby/3.2.0/gems/json-2.13.2/CHANGES.md b/vendor/bundle/ruby/3.2.0/gems/json-2.13.2/CHANGES.md new file mode 100644 index 000000000..13b7e279f --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/json-2.13.2/CHANGES.md @@ -0,0 +1,660 @@ +# Changes + +### Unreleased + +### 2025-07-28 (2.13.2) + +* Improve duplicate key warning and errors to include the key name and point to the right caller. + +### 2025-07-24 (2.13.1) + +* Fix support for older compilers without `__builtin_cpu_supports`. + +### 2025-07-17 (2.13.0) + +* Add new `allow_duplicate_key` parsing options. By default a warning is now emitted when a duplicated key is encountered. + In `json 3.0` an error will be raised. +* Optimize parsing further using SIMD to scan strings. + +### 2025-05-23 (2.12.2) + +* Fix compiler optimization level. + +### 2025-05-23 (2.12.1) + +* Fix a potential crash in large negative floating point number generation. +* Fix for JSON.pretty_generate to use passed state object's generate instead of state class as the required parameters aren't available. + +### 2025-05-12 (2.12.0) + +* Improve floating point generation to not use scientific notation as much. +* Include line and column in parser errors. Both in the message and as exception attributes. +* Handle non-string hash keys with broken `to_s` implementations. +* `JSON.generate` now uses SSE2 (x86) or NEON (arm64) instructions when available to escape strings. + +### 2025-04-25 (2.11.3) + +* Fix a regression in `JSON.pretty_generate` that could cause indentation to be off once some `#to_json` has been called. + +### 2025-04-24 (2.11.2) + +* Add back `JSON::PRETTY_STATE_PROTOTYPE`. This constant was private API but is used by popular gems like `multi_json`. + It now emits a deprecation warning. + +### 2025-04-24 (2.11.1) + +* Add back `JSON.restore`, `JSON.unparse`, `JSON.fast_unparse` and `JSON.pretty_unparse`. + These were deprecated 16 years ago, but never emited warnings, only undocumented, so are + still used by a few gems. + +### 2025-04-24 (2.11.0) + +* Optimize Integer generation to be ~1.8x faster. +* Optimize Float generation to be ~10x faster. +* Fix `JSON.load` proc argument to substitute the parsed object with the return value. + This better match `Marshal.load` behavior. +* Deprecate `JSON.fast_generate` (it's not any faster, so pointless). +* Deprecate `JSON.load_default_options`. +* Deprecate `JSON.unsafe_load_default_options`. +* Deprecate `JSON.dump_default_options`. +* Deprecate `Kernel#j` +* Deprecate `Kernel#jj` +* Remove outdated `JSON.iconv`. +* Remove `Class#json_creatable?` monkey patch. +* Remove deprecated `JSON.restore` method. +* Remove deprecated `JSON.unparse` method. +* Remove deprecated `JSON.fast_unparse` method. +* Remove deprecated `JSON.pretty_unparse` method. +* Remove deprecated `JSON::UnparserError` constant. +* Remove outdated `JSON::MissingUnicodeSupport` constant. + +### 2025-03-12 (2.10.2) + +* Fix a potential crash in the C extension parser. +* Raise a ParserError on all incomplete unicode escape sequence. This was the behavior until `2.10.0` unadvertently changed it. +* Ensure document snippets that are included in parser errors don't include truncated multibyte characters. +* Ensure parser error snippets are valid UTF-8. +* Fix `JSON::GeneratorError#detailed_message` on Ruby < 3.2 + +### 2025-02-10 (2.10.1) + +* Fix a compatibility issue with `MultiJson.dump(obj, pretty: true)`: `no implicit conversion of false into Proc (TypeError)`. 
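To make the 2.13.0 duplicate-key entry above concrete, a sketch of how the option described there would be used; the option name is taken from the changelog text and its exact accepted values are not spelled out there, so treat the second call as an assumption:

```ruby
require 'json'

doc = '{"name": "a", "name": "b"}'

# Since 2.13.0 this emits a duplicate-key warning by default
# (and, per the changelog, json 3.0 will raise instead).
JSON.parse(doc)

# Assumed usage of the new parsing option named in the changelog:
JSON.parse(doc, allow_duplicate_key: true) # last value wins, no warning
```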
+ +### 2025-02-10 (2.10.0) + +* `strict: true` now accept symbols as values. Previously they'd only be accepted as hash keys. +* The C extension Parser has been entirely reimplemented from scratch. +* Introduced `JSON::Coder` as a new API allowing to customize how non native types are serialized in a non-global way. +* Introduced `JSON::Fragment` to allow assembling cached fragments in a safe way. +* The Java implementation of the generator received many optimizations. + +### 2024-12-18 (2.9.1) + +* Fix support for Solaris 10. + +### 2024-12-03 (2.9.0) + +* Fix C implementation of `script_safe` escaping to not confuse some other 3 wide characters with `\u2028` and `\u2029`. + e.g. `JSON.generate(["倩", "瀨"], script_safe: true)` would generate the wrong JSON. +* `JSON.dump(object, some_io)` now write into the IO in chunks while previously it would buffer the entire JSON before writing. +* `JSON::GeneratorError` now has a `#invalid_object` attribute, making it easier to understand why an object tree cannot be serialized. +* Numerous improvements to the JRuby extension. + +### 2024-11-14 (2.8.2) + +* `JSON.load_file` explictly read the file as UTF-8. + +### 2024-11-06 (2.8.1) + +* Fix the java packages to include the extension. + +### 2024-11-06 (2.8.0) + +* Emit a deprecation warning when `JSON.load` create custom types without the `create_additions` option being explictly enabled. + * Prefer to use `JSON.unsafe_load(string)` or `JSON.load(string, create_additions: true)`. +* Emit a deprecation warning when serializing valid UTF-8 strings encoded in `ASCII_8BIT` aka `BINARY`. +* Bump required Ruby version to 2.7. +* Add support for optionally parsing trailing commas, via `allow_trailing_comma: true`, which in cunjunction with the + pre-existing support for comments, make it suitable to parse `jsonc` documents. +* Many performance improvements to `JSON.parse` and `JSON.load`, up to `1.7x` faster on real world documents. +* Some minor performance improvements to `JSON.dump` and `JSON.generate`. +* `JSON.pretty_generate` no longer include newline inside empty object and arrays. + +### 2024-11-04 (2.7.6) + +* Fix a regression in JSON.generate when dealing with Hash keys that are string subclasses, call `to_json` on them. + +### 2024-10-25 (2.7.5) + +* Fix a memory leak when `#to_json` methods raise an exception. +* Gracefully handle formatting configs being set to `nil` instead of `""`. +* Workaround another issue caused by conflicting versions of both `json_pure` and `json` being loaded. + +### 2024-10-25 (2.7.4) + +* Workaround a bug in 3.4.8 and older https://github.com/rubygems/rubygems/pull/6490. + This bug would cause some gems with native extension to fail during compilation. +* Workaround different versions of `json` and `json_pure` being loaded (not officially supported). +* Make `json_pure` Ractor compatible. + +### 2024-10-24 (2.7.3) + +* Numerous performance optimizations in `JSON.generate` and `JSON.dump` (up to 2 times faster). +* Limit the size of ParserError exception messages, only include up to 32 bytes of the unparseable source. +* Fix json-pure's `Object#to_json` to accept non state arguments +* Fix multiline comment support in `json-pure`. +* Fix `JSON.parse` to no longer mutate the argument encoding when passed an ASCII-8BIT string. +* Fix `String#to_json` to raise on invalid encoding in `json-pure`. +* Delete code that was based on CVTUTF. +* Use the pure-Ruby generator on TruffleRuby. +* Fix `strict` mode in `json-pure` to not break on Integer. 
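The 2.8.0 entry above mentions `allow_trailing_comma: true` for jsonc-style input. A short illustrative sketch, assuming the option covers trailing commas in both arrays and objects:

```ruby
require 'json'

jsonc = <<~JSONC
  {
    "name": "example",
    "tags": ["a", "b",],
  }
JSONC

# Rejected by default; accepted with the opt-in added in 2.8.0.
JSON.parse(jsonc, allow_trailing_comma: true)
# => {"name"=>"example", "tags"=>["a", "b"]}
```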
+ +### 2024-04-04 (2.7.2) + +* Use rb_sym2str instead of SYM2ID #561 +* Fix memory leak when exception is raised during JSON generation #574 +* Remove references to "19" methods in JRuby #576 +* Make OpenStruct support as optional by @hsbt in #565 +* Autoload JSON::GenericObject to avoid require ostruct warning in Ruby 3.4 #577 +* Warn to install ostruct if json couldn't load it by @hsbt #578 + +### 2023-12-05 (2.7.1) + +* JSON.dump: handle unenclosed hashes regression #554 +* Overload kwargs in JSON.dump #556 +* [DOC] RDoc for additions #557 +* Fix JSON.dump overload combination #558 + +### 2023-12-01 (2.7.0) + +* Add a strict option to Generator #519 +* `escape_slash` option was renamed as `script_safe` and now also escape U+2028 and U+2029. `escape_slash` is now an alias of `script_safe` #525 +* Remove unnecessary initialization of create_id in JSON.parse() #454 +* Improvements to Hash#to_json in pure implementation generator #203 +* Use ruby_xfree to free buffers #518 +* Fix "unexpected token" offset for Infinity #507 +* Avoid using deprecated BigDecimal.new on JRuby #546 +* Removed code for Ruby 1.8 #540 +* Rename JSON::ParseError to JSON:ParserError #530 +* Call super in included hook #486 +* JRuby requires a minimum of Java 8 #516 +* Always indent even if empty #517 + +### 2022-11-30 (2.6.3) + +* bugfix json/pure mixing escaped with literal unicode raises Encoding::CompatibilityError #483 +* Stop including the parser source __LINE__ in exceptions #470 + +### 2022-11-17 (2.6.2) + +* Remove unknown keyword arg from DateTime.parse #488 +* Ignore java artifacts by @hsbt #489 +* Fix parser bug for empty string allocation #496 + +### 2021-10-24 (2.6.1) + +* Restore version.rb with 2.6.1 + +### 2021-10-14 (2.6.0) + +* Use `rb_enc_interned_str` if available to reduce allocations in `freeze: true` mode. #451. +* Bump required_ruby_version to 2.3. +* Fix compatibility with `GC.compact`. +* Fix some compilation warnings. #469 + +## 2020-12-22 (2.5.1) + +* Restore the compatibility for constants of JSON class. + +## 2020-12-22 (2.5.0) + +* Ready to Ractor-safe at Ruby 3.0. + +## 2020-12-17 (2.4.1) + +* Restore version.rb with 2.4.1 + +## 2020-12-15 (2.4.0) + +* Implement a freeze: parser option #447 +* Fix an issue with generate_pretty and empty objects in the Ruby and Java implementations #449 +* Fix JSON.load_file doc #448 +* Fix pure parser with unclosed arrays / objects #425 +* bundle the LICENSE file in the gem #444 +* Add an option to escape forward slash character #405 +* RDoc for JSON #439 #446 #442 #434 #433 #430 + +## 2020-06-30 (2.3.1) + +* Spelling and grammar fixes for comments. Pull request #191 by Josh + Kline. +* Enhance generic JSON and #generate docs. Pull request #347 by Victor + Shepelev. +* Add :nodoc: for GeneratorMethods. Pull request #349 by Victor Shepelev. +* Baseline changes to help (JRuby) development. Pull request #371 by Karol + Bucek. +* Add metadata for rubygems.org. Pull request #379 by Alexandre ZANNI. +* Remove invalid JSON.generate description from JSON module rdoc. Pull + request #384 by Jeremy Evans. +* Test with TruffleRuby in CI. Pull request #402 by Benoit Daloze. +* Rdoc enhancements. Pull request #413 by Burdette Lamar. +* Fixtures/ are not being tested... Pull request #416 by Marc-André + Lafortune. +* Use frozen string for hash key. Pull request #420 by Marc-André + Lafortune. +* Added :call-seq: to RDoc for some methods. Pull request #422 by Burdette + Lamar. +* Small typo fix. Pull request #423 by Marc-André Lafortune. 
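Two options named above lend themselves to a quick sketch: the `script_safe` generator option (2.7.0, formerly `escape_slash`) and the `freeze:` parser option (2.4.0). Both are shown with plain booleans, which is an assumption based on the entries rather than the gem's full documentation.

```ruby
require 'json'

# script_safe escapes '/', U+2028 and U+2029 so the output can be embedded
# directly inside a <script> tag.
JSON.generate({ 'path' => '</script>' }, script_safe: true)
# produces {"path":"<\/script>"}

# freeze: true hands back frozen objects from the parser.
JSON.parse('{"key":"value"}', freeze: true).frozen? # => true
```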
+ +## 2019-12-11 (2.3.0) + * Fix default of `create_additions` to always be `false` for `JSON(user_input)` + and `JSON.parse(user_input, nil)`. + Note that `JSON.load` remains with default `true` and is meant for internal + serialization of trusted data. [CVE-2020-10663] + * Fix passing args all #to_json in json/add/*. + * Fix encoding issues + * Fix issues of keyword vs positional parameter + * Fix JSON::Parser against bigdecimal updates + * Bug fixes to JRuby port + +## 2019-02-21 (2.2.0) + * Adds support for 2.6 BigDecimal and ruby standard library Set datetype. + +## 2017-04-18 (2.1.0) + * Allow passing of `decimal_class` option to specify a class as which to parse + JSON float numbers. +## 2017-03-23 (2.0.4) + * Raise exception for incomplete unicode surrogates/character escape + sequences. This problem was reported by Daniel Gollahon (dgollahon). + * Fix arbitrary heap exposure problem. This problem was reported by Ahmad + Sherif (ahmadsherif). + +## 2017-01-12 (2.0.3) + * Set `required_ruby_version` to 1.9 + * Some small fixes + +## 2016-07-26 (2.0.2) + * Specify `required_ruby_version` for json\_pure. + * Fix issue #295 failure when parsing frozen strings. + +## 2016-07-01 (2.0.1) + * Fix problem when requiring json\_pure and Parser constant was defined top + level. + * Add `RB_GC_GUARD` to avoid possible GC problem via Pete Johns. + * Store `current_nesting` on stack by Aaron Patterson. + +## 2015-09-11 (2.0.0) + * Now complies to newest JSON RFC 7159. + * Implements compatibility to ruby 2.4 integer unification. + * Drops support for old rubies whose life has ended, that is rubies < 2.0. + Also see https://www.ruby-lang.org/en/news/2014/07/01/eol-for-1-8-7-and-1-9-2/ + * There were still some mentions of dual GPL licensing in the source, but JSON + has just the Ruby license that itself includes an explicit dual-licensing + clause that allows covered software to be distributed under the terms of + the Simplified BSD License instead for all ruby versions >= 1.9.3. This is + however a GPL compatible license according to the Free Software Foundation. + I changed these mentions to be consistent with the Ruby license setting in + the gemspec files which were already correct now. + +## 2017-01-13 (1.8.6) + * Be compatible with ancient ruby 1.8 (maybe?) + +## 2015-09-11 (1.8.5) + * Be compatible with ruby 2.4.0 + * There were still some mentions of dual GPL licensing in the source, but JSON + has just the Ruby license that itself includes an explicit dual-licensing + clause that allows covered software to be distributed under the terms of + the Simplified BSD License instead for all ruby versions >= 1.9.3. This is + however a GPL compatible license according to the Free Software Foundation. + I changed these mentions to be consistent with the Ruby license setting in + the gemspec files which were already correct now. + +## 2015-06-01 (1.8.3) + * Fix potential memory leak, thx to nobu. + +## 2015-01-08 (1.8.2) + * Some performance improvements by Vipul A M . + * Fix by Jason R. Clark to avoid mutation of + `JSON.dump_default_options`. + * More tests by Michael Mac-Vicar and fixing + `space_before` accessor in generator. + * Performance on Jruby improved by Ben Browning . + * Some fixes to be compatible with the new Ruby 2.2 by Zachary Scott + and SHIBATA Hiroshi . + +## 2013-05-13 (1.8.1) + * Remove Rubinius exception since transcoding should be working now. + +## 2013-05-13 (1.8.0) + * Fix https://github.com/ruby/json/issues/162 reported by Marc-Andre + Lafortune . Thanks! 
+ * Applied patches by Yui NARUSE to suppress warning with + -Wchar-subscripts and better validate UTF-8 strings. + * Applied patch by ginriki@github to remove unnecessary if. + * Add load/dump interface to `JSON::GenericObject` to make + serialize :some_attribute, `JSON::GenericObject` + work in Rails active models for convenient `SomeModel#some_attribute.foo.bar` + access to serialised JSON data. + +## 2013-02-04 (1.7.7) + * Security fix for JSON create_additions default value and + `JSON::GenericObject`. It should not be possible to create additions unless + explicitly requested by setting the create_additions argument to true or + using the JSON.load/dump interface. If `JSON::GenericObject` is supposed to + be automatically deserialised, this has to be explicitly enabled by + setting + JSON::GenericObject.json_creatable = true + as well. + * Remove useless assert in fbuffer implementation. + * Apply patch attached to https://github.com/ruby/json/issues#issue/155 + provided by John Shahid , Thx! + * Add license information to rubygems spec data, reported by Jordi Massaguer Pla . + * Improve documentation, thx to Zachary Scott . + +## 2012-11-29 (1.7.6) + * Add `GeneratorState#merge` alias for JRuby, fix state accessor methods. Thx to + jvshahid@github. + * Increase hash likeness of state objects. + +## 2012-08-17 (1.7.5) + * Fix compilation of extension on older rubies. + +## 2012-07-26 (1.7.4) + * Fix compilation problem on AIX, see https://github.com/ruby/json/issues/142 + +## 2012-05-12 (1.7.3) + * Work around Rubinius encoding issues using iconv for conversion instead. + +## 2012-05-11 (1.7.2) + * Fix some encoding issues, that cause problems for the pure and the + extension variant in jruby 1.9 mode. + +## 2012-04-28 (1.7.1) + * Some small fixes for building + +## 2012-04-28 (1.7.0) + * Add `JSON::GenericObject` for method access to objects transmitted via JSON. + +## 2012-04-27 (1.6.7) + * Fix possible crash when trying to parse nil value. + +## 2012-02-11 (1.6.6) + * Propagate src encoding to values made from it (fixes 1.9 mode converting + everything to ascii-8bit; harmless for 1.8 mode too) (Thomas E. Enebo + ), should fix + https://github.com/ruby/json/issues#issue/119. + * Fix https://github.com/ruby/json/issues#issue/124 Thx to Jason Hutchens. + * Fix https://github.com/ruby/json/issues#issue/117 + +## 2012-01-15 (1.6.5) + * Vit Ondruch reported a bug that shows up when using + optimisation under GCC 4.7. Thx to him, Bohuslav Kabrda + and Yui NARUSE for debugging and + developing a patch fix. + +## 2011-12-24 (1.6.4) + * Patches that improve speed on JRuby contributed by Charles Oliver Nutter + . + * Support `object_class`/`array_class` with duck typed hash/array. + +## 2011-12-01 (1.6.3) + * Let `JSON.load('')` return nil as well to make mysql text columns (default to + `''`) work better for serialization. + +## 2011-11-21 (1.6.2) + * Add support for OpenStruct and BigDecimal. + * Fix bug when parsing nil in `quirks_mode`. + * Make JSON.dump and JSON.load methods better cooperate with Rails' serialize + method. Just use: + serialize :value, JSON + * Fix bug with time serialization concerning nanoseconds. Thanks for the + patch go to Josh Partlow (jpartlow@github). + * Improve parsing speed for JSON numbers (integers and floats) in a similar way to + what Evan Phoenix suggested in: + https://github.com/ruby/json/pull/103 + +## 2011-09-18 (1.6.1) + * Using -target 1.5 to force Java bits to compile with 1.5. 
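The 1.7.7 entry above (together with the CVE-2020-10663 note in the 2.3.0 entry) explains that additions are only created when explicitly requested. A brief sketch of that distinction, using the `json/add` support that ships with the gem:

```ruby
require 'json'
require 'json/add/range' # defines Range#to_json and Range.json_create

payload = JSON.generate({ 'json_class' => 'Range', 'data' => [1, 5, false] })

# Safe default: no additions, the document stays plain data.
JSON.parse(payload)
# => {"json_class"=>"Range", "data"=>[1, 5, false]}

# Explicit opt-in, intended only for trusted input.
JSON.parse(payload, create_additions: true)
# => 1..5
```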
+ +## 2011-09-12 (1.6.0) + * Extract utilities (prettifier and GUI-editor) in its own gem json-utils. + * Split json/add/core into different files for classes to be serialised. + +## 2011-08-31 (1.5.4) + * Fix memory leak when used from multiple JRuby. (Patch by + jfirebaugh@github). + * Apply patch by Eric Wong that fixes garbage collection problem + reported in https://github.com/ruby/json/issues/46. + * Add :quirks_mode option to parser and generator. + * Add support for Rational and Complex number additions via json/add/complex + and json/add/rational requires. + +## 2011-06-20 (1.5.3) + * Alias State#configure method as State#merge to increase duck type synonymy with Hash. + * Add `as_json` methods in json/add/core, so rails can create its json objects the new way. + +## 2011-05-11 (1.5.2) + * Apply documentation patch by Cory Monty . + * Add gemspecs for json and json\_pure. + * Fix bug in jruby pretty printing. + * Fix bug in `object_class` and `array_class` when inheriting from Hash or + Array. + +## 2011-01-24 (1.5.1) + * Made rake-compiler build a fat binary gem. This should fix issue + https://github.com/ruby/json/issues#issue/54. + +## 2011-01-22 (1.5.0) + * Included Java source codes for the Jruby extension made by Daniel Luz + . + * Output full exception message of `deep_const_get` to aid debugging. + * Fixed an issue with ruby 1.9 `Module#const_defined?` method, that was + reported by Riley Goodside. + +## 2010-08-09 (1.4.6) + * Fixed oversight reported in http://github.com/ruby/json/issues/closed#issue/23, + always create a new object from the state prototype. + * Made pure and ext api more similar again. + +## 2010-08-07 (1.4.5) + * Manage data structure nesting depth in state object during generation. This + should reduce problems with `to_json` method definіtions that only have one + argument. + * Some fixes in the state objects and additional tests. +## 2010-08-06 (1.4.4) + * Fixes build problem for rubinius under OS X, http://github.com/ruby/json/issues/closed#issue/25 + * Fixes crashes described in http://github.com/ruby/json/issues/closed#issue/21 and + http://github.com/ruby/json/issues/closed#issue/23 +## 2010-05-05 (1.4.3) + * Fixed some test assertions, from Ruby r27587 and r27590, patch by nobu. + * Fixed issue http://github.com/ruby/json/issues/#issue/20 reported by + electronicwhisper@github. Thx! + +## 2010-04-26 (1.4.2) + * Applied patch from naruse Yui NARUSE to make building with + Microsoft Visual C possible again. + * Applied patch from devrandom in order to allow building of + json_pure if extensiontask is not present. + * Thanks to Dustin Schneider , who reported a memory + leak, which is fixed in this release. + * Applied 993f261ccb8f911d2ae57e9db48ec7acd0187283 patch from josh@github. + +## 2010-04-25 (1.4.1) + * Fix for a bug reported by Dan DeLeo , caused by T_FIXNUM + being different on 32bit/64bit architectures. + +## 2010-04-23 (1.4.0) + * Major speed improvements and building with simplified + directory/file-structure. + * Extension should at least be compatible with MRI, YARV and Rubinius. + +## 2010-04-07 (1.2.4) + * Trigger const_missing callback to make Rails' dynamic class loading work. + +## 2010-03-11 (1.2.3) + * Added a `State#[]` method which returns an attribute's value in order to + increase duck type compatibility to Hash. + +## 2010-02-27 (1.2.2) + * Made some changes to make the building of the parser/generator compatible + to Rubinius. 
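Several entries above touch the `object_class`/`array_class` parser options (1.6.4, 1.5.2). The sketch below exercises the `object_class` hook with a hypothetical Hash subclass; the class name and its method_missing sugar are illustrative only.

```ruby
require 'json'

# Hypothetical container class: a Hash that also exposes its keys as methods.
class AttrHash < Hash
  def method_missing(name, *args)
    key = name.to_s
    key?(key) ? self[key] : super
  end

  def respond_to_missing?(name, include_private = false)
    key?(name.to_s) || super
  end
end

doc = JSON.parse('{"user": {"name": "ada"}}', object_class: AttrHash)
doc.user.name # => "ada"
```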
+ +## 2009-11-25 (1.2.1) + * Added `:symbolize_names` option to Parser, which returns symbols instead of + strings in object names/keys. + +## 2009-10-01 (1.2.0) + * `fast_generate` now raises an exception for nan and infinite floats. + * On Ruby 1.8 json supports parsing of UTF-8, UTF-16BE, UTF-16LE, UTF-32BE, + and UTF-32LE JSON documents now. Under Ruby 1.9 the M17n conversion + functions are used to convert from all supported encodings. ASCII-8BIT + encoded strings are handled like all strings under Ruby 1.8 were. + * Better documentation + +## 2009-08-23 (1.1.9) + * Added forgotten main doc file `extra_rdoc_files`. + +## 2009-08-23 (1.1.8) + * Applied a patch by OZAWA Sakuro to make json/pure + work in environments that don't provide iconv. + * Applied patch by okkez_ in order to fix Ruby Bug #1768: + http://redmine.ruby-lang.org/issues/show/1768. + * Finally got around to avoid the rather paranoid escaping of ?/ characters + in the generator's output. The parsers aren't affected by this change. + Thanks to Rich Apodaca for the suggestion. + +## 2009-06-29 (1.1.7) + * Security Fix for JSON::Pure::Parser. A specially designed string could + cause catastrophic backtracking in one of the parser's regular expressions + in earlier 1.1.x versions. JSON::Ext::Parser isn't affected by this issue. + Thanks to Bartosz Blimke for reporting this + problem. + * This release also uses a less strict ruby version requirement for the + creation of the mswin32 native gem. + +## 2009-05-10 (1.1.6) + * No changes. І tested native linux gems in the last release and they don't + play well with different ruby versions other than the one the gem was built + with. This release is just to bump the version number in order to skip the + native gem on rubyforge. + +## 2009-05-10 (1.1.5) + * Started to build gems with rake-compiler gem. + * Applied patch object/array class patch from Brian Candler + and fixes. + +## 2009-04-01 (1.1.4) + * Fixed a bug in the creation of serialized generic rails objects reported by + Friedrich Graeter . + * Deleted tests/runner.rb, we're using testrb instead. + * Editor supports Infinity in numbers now. + * Made some changes in order to get the library to compile/run under Ruby + 1.9. + * Improved speed of the code path for the fast_generate method in the pure + variant. + +## 2008-07-10 (1.1.3) + * Wesley Beary reported a bug in json/add/core's DateTime + handling: If the nominator and denominator of the offset were divisible by + each other Ruby's Rational#to_s returns them as an integer not a fraction + with '/'. This caused a ZeroDivisionError during parsing. + * Use Date#start and DateTime#start instead of sg method, while + remaining backwards compatible. + * Supports ragel >= 6.0 now. + * Corrected some tests. + * Some minor changes. + +## 2007-11-27 (1.1.2) + * Remember default dir (last used directory) in editor. + * JSON::Editor.edit method added, the editor can now receive json texts from + the clipboard via C-v. + * Load json texts from an URL pasted via middle button press. + * Added :create_additions option to Parser. This makes it possible to disable + the creation of additions by force, in order to treat json texts as data + while having additions loaded. + * Jacob Maine reported, that JSON(:foo) outputs a JSON + object if the rails addition is enabled, which is wrong. It now outputs a + JSON string "foo" instead, like suggested by Jacob Maine. + * Discovered a bug in the Ruby Bugs Tracker on rubyforge, that was reported + by John Evans lgastako@gmail.com. 
He could produce a crash in the JSON + generator by returning something other than a String instance from a + to_json method. I now guard against this by doing a rather crude type + check, which raises an exception instead of crashing. + +## 2007-07-06 (1.1.1) + * Yui NARUSE sent some patches to fix tests for Ruby + 1.9. I applied them and adapted some of them a bit to run both on 1.8 and + 1.9. + * Introduced a `JSON.parse!` method without depth checking for people who + like danger. + * Made generate and `pretty_generate` methods configurable by an options hash. + * Added :allow_nan option to parser and generator in order to handle NaN, + Infinity, and -Infinity correctly - if requested. Floats, which aren't numbers, + aren't valid JSON according to RFC4627, so by default an exception will be + raised if any of these symbols are encountered. Thanks to Andrea Censi + for his hint about this. + * Fixed some more tests for Ruby 1.9. + * Implemented dump/load interface of Marshal as suggested in ruby-core:11405 + by murphy . + * Implemented the `max_nesting` feature for generate methods, too. + * Added some implementations for ruby core's custom objects for + serialisation/deserialisation purposes. + +## 2007-05-21 (1.1.0) + * Implemented max_nesting feature for parser to avoid stack overflows for + data from untrusted sources. If you trust the source, you can disable it + with the option max_nesting => false. + * Piers Cawley reported a bug, that not every + character can be escaped by `\` as required by RFC4627. There's a + contradiction between David Crockford's JSON checker test vectors (in + tests/fixtures) and RFC4627, though. I decided to stick to the RFC, because + the JSON checker seems to be a bit older than the RFC. + * Extended license to Ruby License, which includes the GPL. + * Added keyboard shortcuts, and 'Open location' menu item to edit_json.rb. + +## 2007-05-09 (1.0.4) + * Applied a patch from Yui NARUSE to make JSON compile + under Ruby 1.9. Thank you very much for mailing it to me! + * Made binary variants of JSON fail early, instead of falling back to the + pure version. This should avoid overshadowing of eventual problems while + loading of the binary. + +## 2007-03-24 (1.0.3) + * Improved performance of pure variant a bit. + * The ext variant of this release supports the mswin32 platform. Ugh! + +## 2007-03-24 (1.0.2) + * Ext Parser didn't parse 0e0 correctly into 0.0: Fixed! + +## 2007-03-24 (1.0.1) + * Forgot some object files in the build dir. I really like that - not! + +## 2007-03-24 (1.0.0) + * Added C implementations for the JSON generator and a ragel based JSON + parser in C. + * Much more tests, especially fixtures from json.org. + * Further improved conformance to RFC4627. + +## 2007-02-09 (0.4.3) + * Conform more to RFC4627 for JSON: This means JSON strings + now always must contain exactly one object `"{ ... }"` or array `"[ ... ]"` in + order to be parsed without raising an exception. The definition of what + constitutes a whitespace is narrower in JSON than in Ruby ([ \t\r\n]), and + there are differences in floats and integers (no octals or hexadecimals) as + well. + * Added aliases generate and `pretty_generate` of unparse and `pretty_unparse`. + * Fixed a test case. + * Catch an `Iconv::InvalidEncoding` exception, that seems to occur on some Sun + boxes with SunOS 5.8, if iconv doesn't support utf16 conversions. This was + reported by Andrew R Jackson , thanks a bunch! 
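The 1.1.x entries above introduce the `:allow_nan` and `:max_nesting` options; a short sketch of both, following the behaviour described there:

```ruby
require 'json'

# NaN/Infinity are not valid JSON, so allow_nan must be set on both ends.
json = JSON.generate([Float::NAN, Float::INFINITY], allow_nan: true)
# => "[NaN,Infinity]"
JSON.parse(json, allow_nan: true)

# max_nesting protects against deeply nested untrusted input.
deep = ('[' * 200) + (']' * 200)
# JSON.parse(deep)                    # raises JSON::NestingError (default limit 100)
JSON.parse(deep, max_nesting: false)  # limit disabled, parses successfully
```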
+ +## 2006-08-25 (0.4.2) + * Fixed a bug in handling solidi (/-characters), that was reported by + Kevin Gilpin . + +## 2006-02-06 (0.4.1) + * Fixed a bug related to escaping with backslashes. Thanks for the report go + to Florian Munz . + +## 2005-09-23 (0.4.0) + * Initial Rubyforge Version diff --git a/vendor/bundle/ruby/3.2.0/gems/json-2.13.2/COPYING b/vendor/bundle/ruby/3.2.0/gems/json-2.13.2/COPYING new file mode 100644 index 000000000..426810a7f --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/json-2.13.2/COPYING @@ -0,0 +1,56 @@ +Ruby is copyrighted free software by Yukihiro Matsumoto . +You can redistribute it and/or modify it under either the terms of the +2-clause BSDL (see the file BSDL), or the conditions below: + + 1. You may make and give away verbatim copies of the source form of the + software without restriction, provided that you duplicate all of the + original copyright notices and associated disclaimers. + + 2. You may modify your copy of the software in any way, provided that + you do at least ONE of the following: + + a) place your modifications in the Public Domain or otherwise + make them Freely Available, such as by posting said + modifications to Usenet or an equivalent medium, or by allowing + the author to include your modifications in the software. + + b) use the modified software only within your corporation or + organization. + + c) give non-standard binaries non-standard names, with + instructions on where to get the original software distribution. + + d) make other distribution arrangements with the author. + + 3. You may distribute the software in object code or binary form, + provided that you do at least ONE of the following: + + a) distribute the binaries and library files of the software, + together with instructions (in the manual page or equivalent) + on where to get the original distribution. + + b) accompany the distribution with the machine-readable source of + the software. + + c) give non-standard binaries non-standard names, with + instructions on where to get the original software distribution. + + d) make other distribution arrangements with the author. + + 4. You may modify and include the part of the software into any other + software (possibly commercial). But some files in the distribution + are not written by the author, so that they are not under these terms. + + For the list of those files and their copying conditions, see the + file LEGAL. + + 5. The scripts and library files supplied as input to or produced as + output from the software do not automatically fall under the + copyright of the software, but belong to whomever generated them, + and may be sold commercially, and may be aggregated with this + software. + + 6. THIS SOFTWARE IS PROVIDED "AS IS" AND WITHOUT ANY EXPRESS OR + IMPLIED WARRANTIES, INCLUDING, WITHOUT LIMITATION, THE IMPLIED + WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR + PURPOSE. diff --git a/vendor/bundle/ruby/3.2.0/gems/json-2.13.2/LEGAL b/vendor/bundle/ruby/3.2.0/gems/json-2.13.2/LEGAL new file mode 100644 index 000000000..737d18cb8 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/json-2.13.2/LEGAL @@ -0,0 +1,8 @@ +# -*- rdoc -*- + += LEGAL NOTICE INFORMATION +-------------------------- + +All the files in this distribution are covered under either the Ruby's +license (see the file COPYING) or public-domain except some files +mentioned below. 
diff --git a/vendor/bundle/ruby/3.2.0/gems/json-2.13.2/README.md b/vendor/bundle/ruby/3.2.0/gems/json-2.13.2/README.md new file mode 100644 index 000000000..119327213 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/json-2.13.2/README.md @@ -0,0 +1,281 @@ +# JSON implementation for Ruby + +[![CI](https://github.com/ruby/json/actions/workflows/ci.yml/badge.svg)](https://github.com/ruby/json/actions/workflows/ci.yml) + +## Description + +This is an implementation of the JSON specification according to RFC 7159 +http://www.ietf.org/rfc/rfc7159.txt . + +The JSON generator generate UTF-8 character sequences by default. +If an :ascii\_only option with a true value is given, they escape all +non-ASCII and control characters with \uXXXX escape sequences, and support +UTF-16 surrogate pairs in order to be able to generate the whole range of +unicode code points. + +All strings, that are to be encoded as JSON strings, should be UTF-8 byte +sequences on the Ruby side. To encode raw binary strings, that aren't UTF-8 +encoded, please use the to\_json\_raw\_object method of String (which produces +an object, that contains a byte array) and decode the result on the receiving +endpoint. + +## Installation + +Install the gem and add to the application's Gemfile by executing: + + $ bundle add json + +If bundler is not being used to manage dependencies, install the gem by executing: + + $ gem install json + +## Basic Usage + +To use JSON you can + +```ruby +require 'json' +``` + +Now you can parse a JSON document into a ruby data structure by calling + +```ruby +JSON.parse(document) +``` + +If you want to generate a JSON document from a ruby data structure call +```ruby +JSON.generate(data) +``` + +You can also use the `pretty_generate` method (which formats the output more +verbosely and nicely) or `fast_generate` (which doesn't do any of the security +checks generate performs, e. g. nesting deepness checks). + +## Casting non native types + +JSON documents can only support Hashes, Arrays, Strings, Integers and Floats. + +By default if you attempt to serialize something else, `JSON.generate` will +search for a `#to_json` method on that object: + +```ruby +Position = Struct.new(:latitude, :longitude) do + def to_json(state = nil, *) + JSON::State.from_state(state).generate({ + latitude: latitude, + longitude: longitude, + }) + end +end + +JSON.generate([ + Position.new(12323.234, 435345.233), + Position.new(23434.676, 159435.324), +]) # => [{"latitude":12323.234,"longitude":435345.233},{"latitude":23434.676,"longitude":159435.324}] +``` + +If a `#to_json` method isn't defined on the object, `JSON.generate` will fallback to call `#to_s`: + +```ruby +JSON.generate(Object.new) # => "#" +``` + +Both of these behavior can be disabled using the `strict: true` option: + +```ruby +JSON.generate(Object.new, strict: true) # => Object not allowed in JSON (JSON::GeneratorError) +JSON.generate(Position.new(1, 2)) # => Position not allowed in JSON (JSON::GeneratorError) +``` + +## JSON::Coder + +Since `#to_json` methods are global, it can sometimes be problematic if you need a given type to be +serialized in different ways in different locations. 
+ +Instead it is recommended to use the newer `JSON::Coder` API: + +```ruby +module MyApp + API_JSON_CODER = JSON::Coder.new do |object| + case object + when Time + object.iso8601(3) + else + object + end + end +end + +puts MyApp::API_JSON_CODER.dump(Time.now.utc) # => "2025-01-21T08:41:44.286Z" +``` + +The provided block is called for all objects that don't have a native JSON equivalent, and +must return a Ruby object that has a native JSON equivalent. + +## Combining JSON fragments + +To combine JSON fragments into a bigger JSON document, you can use `JSON::Fragment`: + +```ruby +posts_json = cache.fetch_multi(post_ids) do |post_id| + JSON.generate(Post.find(post_id)) +end +posts_json.map! { |post_json| JSON::Fragment.new(post_json) } +JSON.generate({ posts: posts_json, count: posts_json.count }) +``` + +## Round-tripping arbitrary types + +> [!CAUTION] +> You should never use `JSON.unsafe_load` nor `JSON.parse(str, create_additions: true)` to parse untrusted user input, +> as it can lead to remote code execution vulnerabilities. + +To create a JSON document from a ruby data structure, you can call +`JSON.generate` like that: + +```ruby +json = JSON.generate [1, 2, {"a"=>3.141}, false, true, nil, 4..10] +# => "[1,2,{\"a\":3.141},false,true,null,\"4..10\"]" +``` + +To get back a ruby data structure from a JSON document, you have to call +JSON.parse on it: + +```ruby +JSON.parse json +# => [1, 2, {"a"=>3.141}, false, true, nil, "4..10"] +``` + +Note, that the range from the original data structure is a simple +string now. The reason for this is, that JSON doesn't support ranges +or arbitrary classes. In this case the json library falls back to call +`Object#to_json`, which is the same as `#to_s.to_json`. + +It's possible to add JSON support serialization to arbitrary classes by +simply implementing a more specialized version of the `#to_json method`, that +should return a JSON object (a hash converted to JSON with `#to_json`) like +this (don't forget the `*a` for all the arguments): + +```ruby +class Range + def to_json(*a) + { + 'json_class' => self.class.name, # = 'Range' + 'data' => [ first, last, exclude_end? ] + }.to_json(*a) + end +end +``` + +The hash key `json_class` is the class, that will be asked to deserialise the +JSON representation later. In this case it's `Range`, but any namespace of +the form `A::B` or `::A::B` will do. All other keys are arbitrary and can be +used to store the necessary data to configure the object to be deserialised. + +If the key `json_class` is found in a JSON object, the JSON parser checks +if the given class responds to the `json_create` class method. If so, it is +called with the JSON object converted to a Ruby hash. So a range can +be deserialised by implementing `Range.json_create` like this: + +```ruby +class Range + def self.json_create(o) + new(*o['data']) + end +end +``` + +Now it possible to serialise/deserialise ranges as well: + +```ruby +json = JSON.generate [1, 2, {"a"=>3.141}, false, true, nil, 4..10] +# => "[1,2,{\"a\":3.141},false,true,null,{\"json_class\":\"Range\",\"data\":[4,10,false]}]" +JSON.parse json +# => [1, 2, {"a"=>3.141}, false, true, nil, 4..10] +json = JSON.generate [1, 2, {"a"=>3.141}, false, true, nil, 4..10] +# => "[1,2,{\"a\":3.141},false,true,null,{\"json_class\":\"Range\",\"data\":[4,10,false]}]" +JSON.unsafe_load json +# => [1, 2, {"a"=>3.141}, false, true, nil, 4..10] +``` + +`JSON.generate` always creates the shortest possible string representation of a +ruby data structure in one line. 
This is good for data storage or network +protocols, but not so good for humans to read. Fortunately there's also +`JSON.pretty_generate` (or `JSON.pretty_generate`) that creates a more readable +output: + +```ruby + puts JSON.pretty_generate([1, 2, {"a"=>3.141}, false, true, nil, 4..10]) + [ + 1, + 2, + { + "a": 3.141 + }, + false, + true, + null, + { + "json_class": "Range", + "data": [ + 4, + 10, + false + ] + } + ] +``` + +There are also the methods `Kernel#j` for generate, and `Kernel#jj` for +`pretty_generate` output to the console, that work analogous to Core Ruby's `p` and +the `pp` library's `pp` methods. + +## Development + +### Prerequisites + +1. Clone the repository +2. Install dependencies with `bundle install` + +### Testing + +The full test suite can be run with: + +```bash +bundle exec rake test +``` + +### Release + +Update the `lib/json/version.rb` file. + +``` +rbenv shell 2.6.5 +rake build +gem push pkg/json-2.3.0.gem + +rbenv shell jruby-9.2.9.0 +rake build +gem push pkg/json-2.3.0-java.gem +``` + +## Author + +Florian Frank + +## License + +Ruby License, see https://www.ruby-lang.org/en/about/license.txt. + +## Download + +The latest version of this library can be downloaded at + +* https://rubygems.org/gems/json + +Online Documentation should be located at + +* https://www.rubydoc.info/gems/json + +[Ragel]: http://www.colm.net/open-source/ragel/ diff --git a/vendor/bundle/ruby/3.2.0/gems/json-2.13.2/ext/json/ext/fbuffer/fbuffer.h b/vendor/bundle/ruby/3.2.0/gems/json-2.13.2/ext/json/ext/fbuffer/fbuffer.h new file mode 100644 index 000000000..d32371476 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/json-2.13.2/ext/json/ext/fbuffer/fbuffer.h @@ -0,0 +1,270 @@ +#ifndef _FBUFFER_H_ +#define _FBUFFER_H_ + +#include "ruby.h" +#include "ruby/encoding.h" +#include "../vendor/jeaiii-ltoa.h" + +/* shims */ +/* This is the fallback definition from Ruby 3.4 */ + +#ifndef RBIMPL_STDBOOL_H +#if defined(__cplusplus) +# if defined(HAVE_STDBOOL_H) && (__cplusplus >= 201103L) +# include +# endif +#elif defined(HAVE_STDBOOL_H) +# include +#elif !defined(HAVE__BOOL) +typedef unsigned char _Bool; +# define bool _Bool +# define true ((_Bool)+1) +# define false ((_Bool)+0) +# define __bool_true_false_are_defined +#endif +#endif + +#ifndef RB_UNLIKELY +#define RB_UNLIKELY(expr) expr +#endif + +#ifndef RB_LIKELY +#define RB_LIKELY(expr) expr +#endif + +#ifndef MAYBE_UNUSED +# define MAYBE_UNUSED(x) x +#endif + +#ifdef RUBY_DEBUG +#ifndef JSON_DEBUG +#define JSON_DEBUG RUBY_DEBUG +#endif +#endif + +enum fbuffer_type { + FBUFFER_HEAP_ALLOCATED = 0, + FBUFFER_STACK_ALLOCATED = 1, +}; + +typedef struct FBufferStruct { + enum fbuffer_type type; + unsigned long initial_length; + unsigned long len; + unsigned long capa; +#ifdef JSON_DEBUG + unsigned long requested; +#endif + char *ptr; + VALUE io; +} FBuffer; + +#define FBUFFER_STACK_SIZE 512 +#define FBUFFER_IO_BUFFER_SIZE (16384 - 1) +#define FBUFFER_INITIAL_LENGTH_DEFAULT 1024 + +#define FBUFFER_PTR(fb) ((fb)->ptr) +#define FBUFFER_LEN(fb) ((fb)->len) +#define FBUFFER_CAPA(fb) ((fb)->capa) +#define FBUFFER_PAIR(fb) FBUFFER_PTR(fb), FBUFFER_LEN(fb) + +static void fbuffer_free(FBuffer *fb); +static void fbuffer_clear(FBuffer *fb); +static void fbuffer_append(FBuffer *fb, const char *newstr, unsigned long len); +static void fbuffer_append_long(FBuffer *fb, long number); +static inline void fbuffer_append_char(FBuffer *fb, char newchr); +static VALUE fbuffer_finalize(FBuffer *fb); + +static void fbuffer_stack_init(FBuffer *fb, unsigned long 
initial_length, char *stack_buffer, long stack_buffer_size) +{ + fb->initial_length = (initial_length > 0) ? initial_length : FBUFFER_INITIAL_LENGTH_DEFAULT; + if (stack_buffer) { + fb->type = FBUFFER_STACK_ALLOCATED; + fb->ptr = stack_buffer; + fb->capa = stack_buffer_size; + } +#ifdef JSON_DEBUG + fb->requested = 0; +#endif +} + +static inline void fbuffer_consumed(FBuffer *fb, unsigned long consumed) +{ +#ifdef JSON_DEBUG + if (consumed > fb->requested) { + rb_bug("fbuffer: Out of bound write"); + } + fb->requested = 0; +#endif + fb->len += consumed; +} + +static void fbuffer_free(FBuffer *fb) +{ + if (fb->ptr && fb->type == FBUFFER_HEAP_ALLOCATED) { + ruby_xfree(fb->ptr); + } +} + +static void fbuffer_clear(FBuffer *fb) +{ + fb->len = 0; +} + +static void fbuffer_flush(FBuffer *fb) +{ + rb_io_write(fb->io, rb_utf8_str_new(fb->ptr, fb->len)); + fbuffer_clear(fb); +} + +static void fbuffer_realloc(FBuffer *fb, unsigned long required) +{ + if (required > fb->capa) { + if (fb->type == FBUFFER_STACK_ALLOCATED) { + const char *old_buffer = fb->ptr; + fb->ptr = ALLOC_N(char, required); + fb->type = FBUFFER_HEAP_ALLOCATED; + MEMCPY(fb->ptr, old_buffer, char, fb->len); + } else { + REALLOC_N(fb->ptr, char, required); + } + fb->capa = required; + } +} + +static void fbuffer_do_inc_capa(FBuffer *fb, unsigned long requested) +{ + if (RB_UNLIKELY(fb->io)) { + if (fb->capa < FBUFFER_IO_BUFFER_SIZE) { + fbuffer_realloc(fb, FBUFFER_IO_BUFFER_SIZE); + } else { + fbuffer_flush(fb); + } + + if (RB_LIKELY(requested < fb->capa)) { + return; + } + } + + unsigned long required; + + if (RB_UNLIKELY(!fb->ptr)) { + fb->ptr = ALLOC_N(char, fb->initial_length); + fb->capa = fb->initial_length; + } + + for (required = fb->capa; requested > required - fb->len; required <<= 1); + + fbuffer_realloc(fb, required); +} + +static inline void fbuffer_inc_capa(FBuffer *fb, unsigned long requested) +{ +#ifdef JSON_DEBUG + fb->requested = requested; +#endif + + if (RB_UNLIKELY(requested > fb->capa - fb->len)) { + fbuffer_do_inc_capa(fb, requested); + } +} + +static void fbuffer_append(FBuffer *fb, const char *newstr, unsigned long len) +{ + if (len > 0) { + fbuffer_inc_capa(fb, len); + MEMCPY(fb->ptr + fb->len, newstr, char, len); + fbuffer_consumed(fb, len); + } +} + +/* Appends a character into a buffer. The buffer needs to have sufficient capacity, via fbuffer_inc_capa(...). */ +static inline void fbuffer_append_reserved_char(FBuffer *fb, char chr) +{ +#ifdef JSON_DEBUG + if (fb->requested < 1) { + rb_bug("fbuffer: unreserved write"); + } + fb->requested--; +#endif + + fb->ptr[fb->len] = chr; + fb->len++; +} + +static void fbuffer_append_str(FBuffer *fb, VALUE str) +{ + const char *newstr = StringValuePtr(str); + unsigned long len = RSTRING_LEN(str); + + RB_GC_GUARD(str); + + fbuffer_append(fb, newstr, len); +} + +static inline void fbuffer_append_char(FBuffer *fb, char newchr) +{ + fbuffer_inc_capa(fb, 1); + *(fb->ptr + fb->len) = newchr; + fbuffer_consumed(fb, 1); +} + +static inline char *fbuffer_cursor(FBuffer *fb) +{ + return fb->ptr + fb->len; +} + +static inline void fbuffer_advance_to(FBuffer *fb, char *end) +{ + fbuffer_consumed(fb, (end - fb->ptr) - fb->len); +} + +/* + * Appends the decimal string representation of \a number into the buffer. + */ +static void fbuffer_append_long(FBuffer *fb, long number) +{ + /* + * The jeaiii_ultoa() function produces digits left-to-right, + * allowing us to write directly into the buffer, but we don't know + * the number of resulting characters. 
+ * + * We do know, however, that the `number` argument is always in the + * range 0xc000000000000000 to 0x3fffffffffffffff, or, in decimal, + * -4611686018427387904 to 4611686018427387903. The max number of chars + * generated is therefore 20 (including a potential sign character). + */ + + static const int MAX_CHARS_FOR_LONG = 20; + + fbuffer_inc_capa(fb, MAX_CHARS_FOR_LONG); + + if (number < 0) { + fbuffer_append_reserved_char(fb, '-'); + + /* + * Since number is always > LONG_MIN, `-number` will not overflow + * and is always the positive abs() value. + */ + number = -number; + } + + char *end = jeaiii_ultoa(fbuffer_cursor(fb), number); + fbuffer_advance_to(fb, end); +} + +static VALUE fbuffer_finalize(FBuffer *fb) +{ + if (fb->io) { + fbuffer_flush(fb); + fbuffer_free(fb); + rb_io_flush(fb->io); + return fb->io; + } else { + VALUE result = rb_utf8_str_new(FBUFFER_PTR(fb), FBUFFER_LEN(fb)); + fbuffer_free(fb); + return result; + } +} + +#endif diff --git a/vendor/bundle/ruby/3.2.0/gems/json-2.13.2/ext/json/ext/generator/Makefile b/vendor/bundle/ruby/3.2.0/gems/json-2.13.2/ext/json/ext/generator/Makefile new file mode 100644 index 000000000..84e836952 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/json-2.13.2/ext/json/ext/generator/Makefile @@ -0,0 +1,269 @@ + +SHELL = /bin/sh + +# V=0 quiet, V=1 verbose. other values don't work. +V = 0 +V0 = $(V:0=) +Q1 = $(V:1=) +Q = $(Q1:0=@) +ECHO1 = $(V:1=@ :) +ECHO = $(ECHO1:0=@ echo) +NULLCMD = : + +#### Start of system configuration section. #### + +srcdir = . +topdir = /usr/include/ruby-3.2.0 +hdrdir = $(topdir) +arch_hdrdir = /usr/include/x86_64-linux-gnu/ruby-3.2.0 +PATH_SEPARATOR = : +VPATH = $(srcdir):$(arch_hdrdir)/ruby:$(hdrdir)/ruby +prefix = $(DESTDIR)/usr +rubysitearchprefix = $(sitearchlibdir)/$(RUBY_BASE_NAME) +rubyarchprefix = $(archlibdir)/$(RUBY_BASE_NAME) +rubylibprefix = $(libdir)/$(RUBY_BASE_NAME) +exec_prefix = $(prefix) +vendorarchhdrdir = $(sitearchincludedir)/$(RUBY_VERSION_NAME)/vendor_ruby +sitearchhdrdir = $(sitearchincludedir)/$(RUBY_VERSION_NAME)/site_ruby +rubyarchhdrdir = $(archincludedir)/$(RUBY_VERSION_NAME) +vendorhdrdir = $(rubyhdrdir)/vendor_ruby +sitehdrdir = $(rubyhdrdir)/site_ruby +rubyhdrdir = $(includedir)/$(RUBY_VERSION_NAME) +vendorarchdir = $(rubysitearchprefix)/vendor_ruby/$(ruby_version) +vendorlibdir = $(vendordir)/$(ruby_version) +vendordir = $(rubylibprefix)/vendor_ruby +sitearchdir = $(DESTDIR)/usr/local/lib/x86_64-linux-gnu/site_ruby +sitelibdir = $(sitedir)/$(ruby_version) +sitedir = $(DESTDIR)/usr/local/lib/site_ruby +rubyarchdir = $(rubyarchprefix)/$(ruby_version) +rubylibdir = $(rubylibprefix)/$(ruby_version) +sitearchincludedir = $(includedir)/$(sitearch) +archincludedir = $(includedir)/$(arch) +sitearchlibdir = $(libdir)/$(sitearch) +archlibdir = $(libdir)/$(arch) +ridir = $(datarootdir)/$(RI_BASE_NAME) +mandir = $(datarootdir)/man +localedir = $(datarootdir)/locale +libdir = $(exec_prefix)/lib +psdir = $(docdir) +pdfdir = $(docdir) +dvidir = $(docdir) +htmldir = $(docdir) +infodir = $(datarootdir)/info +docdir = $(datarootdir)/doc/$(PACKAGE) +oldincludedir = $(DESTDIR)/usr/include +includedir = $(prefix)/include +runstatedir = $(DESTDIR)/var/run +localstatedir = $(DESTDIR)/var +sharedstatedir = $(prefix)/com +sysconfdir = $(DESTDIR)/etc +datadir = $(datarootdir) +datarootdir = $(prefix)/share +libexecdir = $(exec_prefix)/libexec +sbindir = $(exec_prefix)/sbin +bindir = $(exec_prefix)/bin +archdir = $(rubyarchdir) + + +CC_WRAPPER = +CC = x86_64-linux-gnu-gcc +CXX = x86_64-linux-gnu-g++ 
+LIBRUBY = $(LIBRUBY_SO) +LIBRUBY_A = lib$(RUBY_SO_NAME)-static.a +LIBRUBYARG_SHARED = -l$(RUBY_SO_NAME) +LIBRUBYARG_STATIC = -l$(RUBY_SO_NAME)-static $(MAINLIBS) +empty = +OUTFLAG = -o $(empty) +COUTFLAG = -o $(empty) +CSRCFLAG = $(empty) + +RUBY_EXTCONF_H = +cflags = $(optflags) $(debugflags) $(warnflags) +cxxflags = +optflags = -O3 -fno-fast-math +debugflags = -ggdb3 +warnflags = -Wall -Wextra -Wdeprecated-declarations -Wdiv-by-zero -Wduplicated-cond -Wimplicit-function-declaration -Wimplicit-int -Wmisleading-indentation -Wpointer-arith -Wwrite-strings -Wold-style-definition -Wimplicit-fallthrough=0 -Wmissing-noreturn -Wno-cast-function-type -Wno-constant-logical-operand -Wno-long-long -Wno-missing-field-initializers -Wno-overlength-strings -Wno-packed-bitfield-compat -Wno-parentheses-equality -Wno-self-assign -Wno-tautological-compare -Wno-unused-parameter -Wno-unused-value -Wsuggest-attribute=format -Wsuggest-attribute=noreturn -Wunused-variable -Wundef +cppflags = +CCDLFLAGS = -fPIC +CFLAGS = $(CCDLFLAGS) -g -O2 -fno-omit-frame-pointer -mno-omit-leaf-frame-pointer -ffile-prefix-map=BUILDDIR=. -fstack-protector-strong -fstack-clash-protection -Wformat -Werror=format-security -fcf-protection -fdebug-prefix-map=BUILDDIR=/usr/src/ruby3.2-3.2.3-1ubuntu0.24.04.6 -fPIC -std=c99 $(ARCH_FLAG) +INCFLAGS = -I. -I$(arch_hdrdir) -I$(hdrdir)/ruby/backward -I$(hdrdir) -I$(srcdir) +DEFS = +CPPFLAGS = -DJSON_GENERATOR -DHAVE_X86INTRIN_H -DJSON_ENABLE_SIMD -DHAVE_CPUID_H -Wdate-time -D_FORTIFY_SOURCE=3 $(DEFS) $(cppflags) +CXXFLAGS = $(CCDLFLAGS) -g -O2 -fno-omit-frame-pointer -mno-omit-leaf-frame-pointer -ffile-prefix-map=BUILDDIR=. -fstack-protector-strong -fstack-clash-protection -Wformat -Werror=format-security -fcf-protection -fdebug-prefix-map=BUILDDIR=/usr/src/ruby3.2-3.2.3-1ubuntu0.24.04.6 $(ARCH_FLAG) +ldflags = -L. -Wl,-Bsymbolic-functions -Wl,-z,relro -Wl,-z,now -fstack-protector-strong -rdynamic -Wl,-export-dynamic -Wl,--no-as-needed +dldflags = -Wl,-Bsymbolic-functions -Wl,-z,relro -Wl,-z,now +ARCH_FLAG = +DLDFLAGS = $(ldflags) $(dldflags) $(ARCH_FLAG) +LDSHARED = $(CC) -shared +LDSHAREDXX = $(CXX) -shared +AR = x86_64-linux-gnu-gcc-ar +EXEEXT = + +RUBY_INSTALL_NAME = $(RUBY_BASE_NAME)3.2 +RUBY_SO_NAME = ruby-3.2 +RUBYW_INSTALL_NAME = +RUBY_VERSION_NAME = $(RUBY_BASE_NAME)-$(ruby_version) +RUBYW_BASE_NAME = rubyw +RUBY_BASE_NAME = ruby + +arch = x86_64-linux-gnu +sitearch = $(arch) +ruby_version = 3.2.0 +ruby = $(bindir)/$(RUBY_BASE_NAME)3.2 +RUBY = $(ruby) +BUILTRUBY = $(bindir)/$(RUBY_BASE_NAME)3.2 +ruby_headers = $(hdrdir)/ruby.h $(hdrdir)/ruby/backward.h $(hdrdir)/ruby/ruby.h $(hdrdir)/ruby/defines.h $(hdrdir)/ruby/missing.h $(hdrdir)/ruby/intern.h $(hdrdir)/ruby/st.h $(hdrdir)/ruby/subst.h $(arch_hdrdir)/ruby/config.h + +RM = rm -f +RM_RF = rm -fr +RMDIRS = rmdir --ignore-fail-on-non-empty -p +MAKEDIRS = /bin/mkdir -p +INSTALL = /usr/bin/install -c +INSTALL_PROG = $(INSTALL) -m 0755 +INSTALL_DATA = $(INSTALL) -m 644 +COPY = cp +TOUCH = exit > + +#### End of system configuration section. #### + +preload = +libpath = . $(archlibdir) +LIBPATH = -L. -L$(archlibdir) +DEFFILE = + +CLEANFILES = mkmf.log +DISTCLEANFILES = +DISTCLEANDIRS = + +extout = +extout_prefix = +target_prefix = /json/ext +LOCAL_LIBS = +LIBS = $(LIBRUBYARG_SHARED) -lm -lpthread -lc +ORIG_SRCS = generator.c +SRCS = $(ORIG_SRCS) +OBJS = generator.o +HDRS = +LOCAL_HDRS = +TARGET = generator +TARGET_NAME = generator +TARGET_ENTRY = Init_$(TARGET_NAME) +DLLIB = $(TARGET).so +EXTSTATIC = +STATIC_LIB = + +TIMESTAMP_DIR = . 
+BINDIR = $(bindir) +RUBYCOMMONDIR = $(sitedir)$(target_prefix) +RUBYLIBDIR = $(sitelibdir)$(target_prefix) +RUBYARCHDIR = $(sitearchdir)$(target_prefix) +HDRDIR = $(sitehdrdir)$(target_prefix) +ARCHHDRDIR = $(sitearchhdrdir)$(target_prefix) +TARGET_SO_DIR = +TARGET_SO = $(TARGET_SO_DIR)$(DLLIB) +CLEANLIBS = $(TARGET_SO) false +CLEANOBJS = $(OBJS) *.bak +TARGET_SO_DIR_TIMESTAMP = $(TIMESTAMP_DIR)/.sitearchdir.-.json.-.ext.time + +all: $(DLLIB) +static: $(STATIC_LIB) +.PHONY: all install static install-so install-rb +.PHONY: clean clean-so clean-static clean-rb + +clean-static:: +clean-rb-default:: +clean-rb:: +clean-so:: +clean: clean-so clean-static clean-rb-default clean-rb + -$(Q)$(RM_RF) $(CLEANLIBS) $(CLEANOBJS) $(CLEANFILES) .*.time + +distclean-rb-default:: +distclean-rb:: +distclean-so:: +distclean-static:: +distclean: clean distclean-so distclean-static distclean-rb-default distclean-rb + -$(Q)$(RM) Makefile $(RUBY_EXTCONF_H) conftest.* mkmf.log + -$(Q)$(RM) core ruby$(EXEEXT) *~ $(DISTCLEANFILES) + -$(Q)$(RMDIRS) $(DISTCLEANDIRS) 2> /dev/null || true + +realclean: distclean +install: install-so install-rb + +install-so: $(DLLIB) $(TARGET_SO_DIR_TIMESTAMP) + $(INSTALL_PROG) $(DLLIB) $(RUBYARCHDIR) +clean-static:: + -$(Q)$(RM) $(STATIC_LIB) +install-rb: pre-install-rb do-install-rb install-rb-default +install-rb-default: pre-install-rb-default do-install-rb-default +pre-install-rb: Makefile +pre-install-rb-default: Makefile +do-install-rb: +do-install-rb-default: +pre-install-rb-default: + @$(NULLCMD) +$(TARGET_SO_DIR_TIMESTAMP): + $(Q) $(MAKEDIRS) $(@D) $(RUBYARCHDIR) + $(Q) $(TOUCH) $@ + +site-install: site-install-so site-install-rb +site-install-so: install-so +site-install-rb: install-rb + +.SUFFIXES: .c .m .cc .mm .cxx .cpp .o .S + +.cc.o: + $(ECHO) compiling $(<) + $(Q) $(CXX) $(INCFLAGS) $(CPPFLAGS) $(CXXFLAGS) $(COUTFLAG)$@ -c $(CSRCFLAG)$< + +.cc.S: + $(ECHO) translating $(<) + $(Q) $(CXX) $(INCFLAGS) $(CPPFLAGS) $(CXXFLAGS) $(COUTFLAG)$@ -S $(CSRCFLAG)$< + +.mm.o: + $(ECHO) compiling $(<) + $(Q) $(CXX) $(INCFLAGS) $(CPPFLAGS) $(CXXFLAGS) $(COUTFLAG)$@ -c $(CSRCFLAG)$< + +.mm.S: + $(ECHO) translating $(<) + $(Q) $(CXX) $(INCFLAGS) $(CPPFLAGS) $(CXXFLAGS) $(COUTFLAG)$@ -S $(CSRCFLAG)$< + +.cxx.o: + $(ECHO) compiling $(<) + $(Q) $(CXX) $(INCFLAGS) $(CPPFLAGS) $(CXXFLAGS) $(COUTFLAG)$@ -c $(CSRCFLAG)$< + +.cxx.S: + $(ECHO) translating $(<) + $(Q) $(CXX) $(INCFLAGS) $(CPPFLAGS) $(CXXFLAGS) $(COUTFLAG)$@ -S $(CSRCFLAG)$< + +.cpp.o: + $(ECHO) compiling $(<) + $(Q) $(CXX) $(INCFLAGS) $(CPPFLAGS) $(CXXFLAGS) $(COUTFLAG)$@ -c $(CSRCFLAG)$< + +.cpp.S: + $(ECHO) translating $(<) + $(Q) $(CXX) $(INCFLAGS) $(CPPFLAGS) $(CXXFLAGS) $(COUTFLAG)$@ -S $(CSRCFLAG)$< + +.c.o: + $(ECHO) compiling $(<) + $(Q) $(CC) $(INCFLAGS) $(CPPFLAGS) $(CFLAGS) $(COUTFLAG)$@ -c $(CSRCFLAG)$< + +.c.S: + $(ECHO) translating $(<) + $(Q) $(CC) $(INCFLAGS) $(CPPFLAGS) $(CFLAGS) $(COUTFLAG)$@ -S $(CSRCFLAG)$< + +.m.o: + $(ECHO) compiling $(<) + $(Q) $(CC) $(INCFLAGS) $(CPPFLAGS) $(CFLAGS) $(COUTFLAG)$@ -c $(CSRCFLAG)$< + +.m.S: + $(ECHO) translating $(<) + $(Q) $(CC) $(INCFLAGS) $(CPPFLAGS) $(CFLAGS) $(COUTFLAG)$@ -S $(CSRCFLAG)$< + +$(TARGET_SO): $(OBJS) Makefile + $(ECHO) linking shared-object json/ext/$(DLLIB) + -$(Q)$(RM) $(@) + $(Q) $(LDSHARED) -o $@ $(OBJS) $(LIBPATH) $(DLDFLAGS) $(LOCAL_LIBS) $(LIBS) + + + +$(OBJS): $(HDRS) $(ruby_headers) diff --git a/vendor/bundle/ruby/3.2.0/gems/json-2.13.2/ext/json/ext/generator/extconf.rb b/vendor/bundle/ruby/3.2.0/gems/json-2.13.2/ext/json/ext/generator/extconf.rb 
new file mode 100644 index 000000000..fb9afd07f --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/json-2.13.2/ext/json/ext/generator/extconf.rb @@ -0,0 +1,16 @@ +require 'mkmf' + +if RUBY_ENGINE == 'truffleruby' + # The pure-Ruby generator is faster on TruffleRuby, so skip compiling the generator extension + File.write('Makefile', dummy_makefile("").join) +else + append_cflags("-std=c99") + $defs << "-DJSON_GENERATOR" + $defs << "-DJSON_DEBUG" if ENV["JSON_DEBUG"] + + if enable_config('generator-use-simd', default=!ENV["JSON_DISABLE_SIMD"]) + load __dir__ + "/../simd/conf.rb" + end + + create_makefile 'json/ext/generator' +end diff --git a/vendor/bundle/ruby/3.2.0/gems/json-2.13.2/ext/json/ext/generator/generator.c b/vendor/bundle/ruby/3.2.0/gems/json-2.13.2/ext/json/ext/generator/generator.c new file mode 100644 index 000000000..d60fbba41 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/json-2.13.2/ext/json/ext/generator/generator.c @@ -0,0 +1,2162 @@ +#include "ruby.h" +#include "../fbuffer/fbuffer.h" +#include "../vendor/fpconv.c" + +#include +#include + +#include "../simd/simd.h" + +/* ruby api and some helpers */ + +typedef struct JSON_Generator_StateStruct { + VALUE indent; + VALUE space; + VALUE space_before; + VALUE object_nl; + VALUE array_nl; + VALUE as_json; + + long max_nesting; + long depth; + long buffer_initial_length; + + bool allow_nan; + bool ascii_only; + bool script_safe; + bool strict; +} JSON_Generator_State; + +#ifndef RB_UNLIKELY +#define RB_UNLIKELY(cond) (cond) +#endif + +static VALUE mJSON, cState, cFragment, mString_Extend, eGeneratorError, eNestingError, Encoding_UTF_8; + +static ID i_to_s, i_to_json, i_new, i_pack, i_unpack, i_create_id, i_extend, i_encode; +static VALUE sym_indent, sym_space, sym_space_before, sym_object_nl, sym_array_nl, sym_max_nesting, sym_allow_nan, + sym_ascii_only, sym_depth, sym_buffer_initial_length, sym_script_safe, sym_escape_slash, sym_strict, sym_as_json; + + +#define GET_STATE_TO(self, state) \ + TypedData_Get_Struct(self, JSON_Generator_State, &JSON_Generator_State_type, state) + +#define GET_STATE(self) \ + JSON_Generator_State *state; \ + GET_STATE_TO(self, state) + +struct generate_json_data; + +typedef void (*generator_func)(FBuffer *buffer, struct generate_json_data *data, VALUE obj); + +struct generate_json_data { + FBuffer *buffer; + VALUE vstate; + JSON_Generator_State *state; + VALUE obj; + generator_func func; +}; + +static VALUE cState_from_state_s(VALUE self, VALUE opts); +static VALUE cState_partial_generate(VALUE self, VALUE obj, generator_func, VALUE io); +static void generate_json(FBuffer *buffer, struct generate_json_data *data, VALUE obj); +static void generate_json_object(FBuffer *buffer, struct generate_json_data *data, VALUE obj); +static void generate_json_array(FBuffer *buffer, struct generate_json_data *data, VALUE obj); +static void generate_json_string(FBuffer *buffer, struct generate_json_data *data, VALUE obj); +static void generate_json_null(FBuffer *buffer, struct generate_json_data *data, VALUE obj); +static void generate_json_false(FBuffer *buffer, struct generate_json_data *data, VALUE obj); +static void generate_json_true(FBuffer *buffer, struct generate_json_data *data, VALUE obj); +#ifdef RUBY_INTEGER_UNIFICATION +static void generate_json_integer(FBuffer *buffer, struct generate_json_data *data, VALUE obj); +#endif +static void generate_json_fixnum(FBuffer *buffer, struct generate_json_data *data, VALUE obj); +static void generate_json_bignum(FBuffer *buffer, struct generate_json_data *data, 
VALUE obj); +static void generate_json_float(FBuffer *buffer, struct generate_json_data *data, VALUE obj); +static void generate_json_fragment(FBuffer *buffer, struct generate_json_data *data, VALUE obj); + +static int usascii_encindex, utf8_encindex, binary_encindex; + +#ifdef RBIMPL_ATTR_NORETURN +RBIMPL_ATTR_NORETURN() +#endif +static void raise_generator_error_str(VALUE invalid_object, VALUE str) +{ + VALUE exc = rb_exc_new_str(eGeneratorError, str); + rb_ivar_set(exc, rb_intern("@invalid_object"), invalid_object); + rb_exc_raise(exc); +} + +#ifdef RBIMPL_ATTR_NORETURN +RBIMPL_ATTR_NORETURN() +#endif +#ifdef RBIMPL_ATTR_FORMAT +RBIMPL_ATTR_FORMAT(RBIMPL_PRINTF_FORMAT, 2, 3) +#endif +static void raise_generator_error(VALUE invalid_object, const char *fmt, ...) +{ + va_list args; + va_start(args, fmt); + VALUE str = rb_vsprintf(fmt, args); + va_end(args); + raise_generator_error_str(invalid_object, str); +} + +// 0 - single byte char that don't need to be escaped. +// (x | 8) - char that needs to be escaped. +static const unsigned char CHAR_LENGTH_MASK = 7; +static const unsigned char ESCAPE_MASK = 8; + +typedef struct _search_state { + const char *ptr; + const char *end; + const char *cursor; + FBuffer *buffer; + +#ifdef HAVE_SIMD + const char *chunk_base; + const char *chunk_end; + bool has_matches; + +#if defined(HAVE_SIMD_NEON) + uint64_t matches_mask; +#elif defined(HAVE_SIMD_SSE2) + int matches_mask; +#else +#error "Unknown SIMD Implementation." +#endif /* HAVE_SIMD_NEON */ +#endif /* HAVE_SIMD */ +} search_state; + +#if (defined(__GNUC__ ) || defined(__clang__)) +#define FORCE_INLINE __attribute__((always_inline)) +#else +#define FORCE_INLINE +#endif + +static inline FORCE_INLINE void search_flush(search_state *search) +{ + // Do not remove this conditional without profiling, specifically escape-heavy text. + // escape_UTF8_char_basic will advance search->ptr and search->cursor (effectively a search_flush). + // For back-to-back characters that need to be escaped, specifcally for the SIMD code paths, this method + // will be called just before calling escape_UTF8_char_basic. There will be no characers to append for the + // consecutive characters that need to be escaped. While the fbuffer_append is a no-op if + // nothing needs to be flushed, we can save a few memory references with this conditional. 
+ if (search->ptr > search->cursor) { + fbuffer_append(search->buffer, search->cursor, search->ptr - search->cursor); + search->cursor = search->ptr; + } +} + +static const unsigned char escape_table_basic[256] = { + // ASCII Control Characters + 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, + 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, + // ASCII Characters + 0, 0, 9, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // '"' + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 9, 0, 0, 0, // '\\' + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, +}; + +static unsigned char (*search_escape_basic_impl)(search_state *); + +static inline unsigned char search_escape_basic(search_state *search) +{ + while (search->ptr < search->end) { + if (RB_UNLIKELY(escape_table_basic[(const unsigned char)*search->ptr])) { + search_flush(search); + return 1; + } else { + search->ptr++; + } + } + search_flush(search); + return 0; +} + +static inline FORCE_INLINE void escape_UTF8_char_basic(search_state *search) +{ + const unsigned char ch = (unsigned char)*search->ptr; + switch (ch) { + case '"': fbuffer_append(search->buffer, "\\\"", 2); break; + case '\\': fbuffer_append(search->buffer, "\\\\", 2); break; + case '/': fbuffer_append(search->buffer, "\\/", 2); break; + case '\b': fbuffer_append(search->buffer, "\\b", 2); break; + case '\f': fbuffer_append(search->buffer, "\\f", 2); break; + case '\n': fbuffer_append(search->buffer, "\\n", 2); break; + case '\r': fbuffer_append(search->buffer, "\\r", 2); break; + case '\t': fbuffer_append(search->buffer, "\\t", 2); break; + default: { + const char *hexdig = "0123456789abcdef"; + char scratch[6] = { '\\', 'u', '0', '0', 0, 0 }; + scratch[4] = hexdig[(ch >> 4) & 0xf]; + scratch[5] = hexdig[ch & 0xf]; + fbuffer_append(search->buffer, scratch, 6); + break; + } + } + search->ptr++; + search->cursor = search->ptr; +} + +/* Converts in_string to a JSON string (without the wrapping '"' + * characters) in FBuffer out_buffer. + * + * Character are JSON-escaped according to: + * + * - Always: ASCII control characters (0x00-0x1F), dquote, and + * backslash. + * + * - If out_ascii_only: non-ASCII characters (>0x7F) + * + * - If script_safe: forwardslash (/), line separator (U+2028), and + * paragraph separator (U+2029) + * + * Everything else (should be UTF-8) is just passed through and + * appended to the result. 
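+ *
+ * A few concrete, illustrative cases of the rules above (not exhaustive):
+ *
+ * - '"' becomes \" and '\' becomes \\ in every mode.
+ * - A raw newline (0x0A) becomes \n; control characters without a short
+ *   form become \u00XX.
+ * - With out_ascii_only, a character such as U+00E9 becomes \u00e9.
+ * - With script_safe, '/' additionally becomes \/ and U+2028 / U+2029
+ *   become \u2028 / \u2029.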
+ */ +static inline void convert_UTF8_to_JSON(search_state *search) +{ + while (search_escape_basic_impl(search)) { + escape_UTF8_char_basic(search); + } +} + +static inline void escape_UTF8_char(search_state *search, unsigned char ch_len) +{ + const unsigned char ch = (unsigned char)*search->ptr; + switch (ch_len) { + case 1: { + switch (ch) { + case '"': fbuffer_append(search->buffer, "\\\"", 2); break; + case '\\': fbuffer_append(search->buffer, "\\\\", 2); break; + case '/': fbuffer_append(search->buffer, "\\/", 2); break; + case '\b': fbuffer_append(search->buffer, "\\b", 2); break; + case '\f': fbuffer_append(search->buffer, "\\f", 2); break; + case '\n': fbuffer_append(search->buffer, "\\n", 2); break; + case '\r': fbuffer_append(search->buffer, "\\r", 2); break; + case '\t': fbuffer_append(search->buffer, "\\t", 2); break; + default: { + const char *hexdig = "0123456789abcdef"; + char scratch[6] = { '\\', 'u', '0', '0', 0, 0 }; + scratch[4] = hexdig[(ch >> 4) & 0xf]; + scratch[5] = hexdig[ch & 0xf]; + fbuffer_append(search->buffer, scratch, 6); + break; + } + } + break; + } + case 3: { + if (search->ptr[2] & 1) { + fbuffer_append(search->buffer, "\\u2029", 6); + } else { + fbuffer_append(search->buffer, "\\u2028", 6); + } + break; + } + } + search->cursor = (search->ptr += ch_len); +} + +#ifdef HAVE_SIMD + +static inline FORCE_INLINE char *copy_remaining_bytes(search_state *search, unsigned long vec_len, unsigned long len) +{ + // Flush the buffer so everything up until the last 'len' characters are unflushed. + search_flush(search); + + FBuffer *buf = search->buffer; + fbuffer_inc_capa(buf, vec_len); + + char *s = (buf->ptr + buf->len); + + // Pad the buffer with dummy characters that won't need escaping. + // This seem wateful at first sight, but memset of vector length is very fast. + memset(s, 'X', vec_len); + + // Optimistically copy the remaining 'len' characters to the output FBuffer. If there are no characters + // to escape, then everything ends up in the correct spot. Otherwise it was convenient temporary storage. + MEMCPY(s, search->ptr, char, len); + + return s; +} + +#ifdef HAVE_SIMD_NEON + +static inline FORCE_INLINE unsigned char neon_next_match(search_state *search) +{ + uint64_t mask = search->matches_mask; + uint32_t index = trailing_zeros64(mask) >> 2; + + // It is assumed escape_UTF8_char_basic will only ever increase search->ptr by at most one character. + // If we want to use a similar approach for full escaping we'll need to ensure: + // search->chunk_base + index >= search->ptr + // However, since we know escape_UTF8_char_basic only increases search->ptr by one, if the next match + // is one byte after the previous match then: + // search->chunk_base + index == search->ptr + search->ptr = search->chunk_base + index; + mask &= mask - 1; + search->matches_mask = mask; + search_flush(search); + return 1; +} + +static inline unsigned char search_escape_basic_neon(search_state *search) +{ + if (RB_UNLIKELY(search->has_matches)) { + // There are more matches if search->matches_mask > 0. + if (search->matches_mask > 0) { + return neon_next_match(search); + } else { + // neon_next_match will only advance search->ptr up to the last matching character. + // Skip over any characters in the last chunk that occur after the last match. + search->has_matches = false; + search->ptr = search->chunk_end; + } + } + + /* + * The code below implements an SIMD-based algorithm to determine if N bytes at a time + * need to be escaped. + * + * Assume the ptr = "Te\sting!" 
(the double quotes are included in the string) + * + * The explanation will be limited to the first 8 bytes of the string for simplicity. However + * the vector insructions may work on larger vectors. + * + * First, we load three constants 'lower_bound', 'backslash' and 'dblquote" in vector registers. + * + * lower_bound: [20 20 20 20 20 20 20 20] + * backslash: [5C 5C 5C 5C 5C 5C 5C 5C] + * dblquote: [22 22 22 22 22 22 22 22] + * + * Next we load the first chunk of the ptr: + * [22 54 65 5C 73 74 69 6E] (" T e \ s t i n) + * + * First we check if any byte in chunk is less than 32 (0x20). This returns the following vector + * as no bytes are less than 32 (0x20): + * [0 0 0 0 0 0 0 0] + * + * Next, we check if any byte in chunk is equal to a backslash: + * [0 0 0 FF 0 0 0 0] + * + * Finally we check if any byte in chunk is equal to a double quote: + * [FF 0 0 0 0 0 0 0] + * + * Now we have three vectors where each byte indicates if the corresponding byte in chunk + * needs to be escaped. We combine these vectors with a series of logical OR instructions. + * This is the needs_escape vector and it is equal to: + * [FF 0 0 FF 0 0 0 0] + * + * Next we compute the bitwise AND between each byte and 0x1 and compute the horizontal sum of + * the values in the vector. This computes how many bytes need to be escaped within this chunk. + * + * Finally we compute a mask that indicates which bytes need to be escaped. If the mask is 0 then, + * no bytes need to be escaped and we can continue to the next chunk. If the mask is not 0 then we + * have at least one byte that needs to be escaped. + */ + + if (string_scan_simd_neon(&search->ptr, search->end, &search->matches_mask)) { + search->has_matches = true; + search->chunk_base = search->ptr; + search->chunk_end = search->ptr + sizeof(uint8x16_t); + return neon_next_match(search); + } + + // There are fewer than 16 bytes left. + unsigned long remaining = (search->end - search->ptr); + if (remaining >= SIMD_MINIMUM_THRESHOLD) { + char *s = copy_remaining_bytes(search, sizeof(uint8x16_t), remaining); + + uint64_t mask = compute_chunk_mask_neon(s); + + if (!mask) { + // Nothing to escape, ensure search_flush doesn't do anything by setting + // search->cursor to search->ptr. + fbuffer_consumed(search->buffer, remaining); + search->ptr = search->end; + search->cursor = search->end; + return 0; + } + + search->matches_mask = mask; + search->has_matches = true; + search->chunk_end = search->end; + search->chunk_base = search->ptr; + return neon_next_match(search); + } + + if (search->ptr < search->end) { + return search_escape_basic(search); + } + + search_flush(search); + return 0; +} +#endif /* HAVE_SIMD_NEON */ + +#ifdef HAVE_SIMD_SSE2 + +static inline FORCE_INLINE unsigned char sse2_next_match(search_state *search) +{ + int mask = search->matches_mask; + int index = trailing_zeros(mask); + + // It is assumed escape_UTF8_char_basic will only ever increase search->ptr by at most one character. 
+ // If we want to use a similar approach for full escaping we'll need to ensure: + // search->chunk_base + index >= search->ptr + // However, since we know escape_UTF8_char_basic only increases search->ptr by one, if the next match + // is one byte after the previous match then: + // search->chunk_base + index == search->ptr + search->ptr = search->chunk_base + index; + mask &= mask - 1; + search->matches_mask = mask; + search_flush(search); + return 1; +} + +#if defined(__clang__) || defined(__GNUC__) +#define TARGET_SSE2 __attribute__((target("sse2"))) +#else +#define TARGET_SSE2 +#endif + +static inline TARGET_SSE2 FORCE_INLINE unsigned char search_escape_basic_sse2(search_state *search) +{ + if (RB_UNLIKELY(search->has_matches)) { + // There are more matches if search->matches_mask > 0. + if (search->matches_mask > 0) { + return sse2_next_match(search); + } else { + // sse2_next_match will only advance search->ptr up to the last matching character. + // Skip over any characters in the last chunk that occur after the last match. + search->has_matches = false; + if (RB_UNLIKELY(search->chunk_base + sizeof(__m128i) >= search->end)) { + search->ptr = search->end; + } else { + search->ptr = search->chunk_base + sizeof(__m128i); + } + } + } + + if (string_scan_simd_sse2(&search->ptr, search->end, &search->matches_mask)) { + search->has_matches = true; + search->chunk_base = search->ptr; + search->chunk_end = search->ptr + sizeof(__m128i); + return sse2_next_match(search); + } + + // There are fewer than 16 bytes left. + unsigned long remaining = (search->end - search->ptr); + if (remaining >= SIMD_MINIMUM_THRESHOLD) { + char *s = copy_remaining_bytes(search, sizeof(__m128i), remaining); + + int needs_escape_mask = compute_chunk_mask_sse2(s); + + if (needs_escape_mask == 0) { + // Nothing to escape, ensure search_flush doesn't do anything by setting + // search->cursor to search->ptr. 
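+ // The tail bytes were already copied into the output buffer by
+ // copy_remaining_bytes above, so consuming `remaining` bytes here simply
+ // commits that copy; the 'X' padding written past the end is never consumed.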
+ fbuffer_consumed(search->buffer, remaining); + search->ptr = search->end; + search->cursor = search->end; + return 0; + } + + search->has_matches = true; + search->matches_mask = needs_escape_mask; + search->chunk_base = search->ptr; + return sse2_next_match(search); + } + + if (search->ptr < search->end) { + return search_escape_basic(search); + } + + search_flush(search); + return 0; +} + +#endif /* HAVE_SIMD_SSE2 */ + +#endif /* HAVE_SIMD */ + +static const unsigned char script_safe_escape_table[256] = { + // ASCII Control Characters + 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, + 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, + // ASCII Characters + 0, 0, 9, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 9, // '"' and '/' + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 9, 0, 0, 0, // '\\' + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + // Continuation byte + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, + // First byte of a 2-byte code point + 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, + 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, + // First byte of a 3-byte code point + 3, 3,11, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, // 0xE2 is the start of \u2028 and \u2029 + //First byte of a 4+ byte code point + 4, 4, 4, 4, 4, 4, 4, 4, 5, 5, 5, 5, 6, 6, 9, 9, +}; + +static inline unsigned char search_script_safe_escape(search_state *search) +{ + while (search->ptr < search->end) { + unsigned char ch = (unsigned char)*search->ptr; + unsigned char ch_len = script_safe_escape_table[ch]; + + if (RB_UNLIKELY(ch_len)) { + if (ch_len & ESCAPE_MASK) { + if (RB_UNLIKELY(ch_len == 11)) { + const unsigned char *uptr = (const unsigned char *)search->ptr; + if (!(uptr[1] == 0x80 && (uptr[2] >> 1) == 0x54)) { + search->ptr += 3; + continue; + } + } + search_flush(search); + return ch_len & CHAR_LENGTH_MASK; + } else { + search->ptr += ch_len; + } + } else { + search->ptr++; + } + } + search_flush(search); + return 0; +} + +static void convert_UTF8_to_script_safe_JSON(search_state *search) +{ + unsigned char ch_len; + while ((ch_len = search_script_safe_escape(search))) { + escape_UTF8_char(search, ch_len); + } +} + +static const unsigned char ascii_only_escape_table[256] = { + // ASCII Control Characters + 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, + 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, + // ASCII Characters + 0, 0, 9, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // '"' + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 9, 0, 0, 0, // '\\' + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + // Continuation byte + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, + // First byte of a 2-byte code point + 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, + 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, + // First byte of a 3-byte code point + 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, + //First byte of a 4+ byte code point + 4, 4, 4, 4, 4, 4, 4, 4, 5, 5, 5, 5, 6, 6, 9, 9, +}; + +static inline unsigned char 
search_ascii_only_escape(search_state *search, const unsigned char escape_table[256]) +{ + while (search->ptr < search->end) { + unsigned char ch = (unsigned char)*search->ptr; + unsigned char ch_len = escape_table[ch]; + + if (RB_UNLIKELY(ch_len)) { + search_flush(search); + return ch_len & CHAR_LENGTH_MASK; + } else { + search->ptr++; + } + } + search_flush(search); + return 0; +} + +static inline void full_escape_UTF8_char(search_state *search, unsigned char ch_len) +{ + const unsigned char ch = (unsigned char)*search->ptr; + switch (ch_len) { + case 1: { + switch (ch) { + case '"': fbuffer_append(search->buffer, "\\\"", 2); break; + case '\\': fbuffer_append(search->buffer, "\\\\", 2); break; + case '/': fbuffer_append(search->buffer, "\\/", 2); break; + case '\b': fbuffer_append(search->buffer, "\\b", 2); break; + case '\f': fbuffer_append(search->buffer, "\\f", 2); break; + case '\n': fbuffer_append(search->buffer, "\\n", 2); break; + case '\r': fbuffer_append(search->buffer, "\\r", 2); break; + case '\t': fbuffer_append(search->buffer, "\\t", 2); break; + default: { + const char *hexdig = "0123456789abcdef"; + char scratch[6] = { '\\', 'u', '0', '0', 0, 0 }; + scratch[4] = hexdig[(ch >> 4) & 0xf]; + scratch[5] = hexdig[ch & 0xf]; + fbuffer_append(search->buffer, scratch, 6); + break; + } + } + break; + } + default: { + const char *hexdig = "0123456789abcdef"; + char scratch[12] = { '\\', 'u', 0, 0, 0, 0, '\\', 'u' }; + + uint32_t wchar = 0; + + switch (ch_len) { + case 2: + wchar = ch & 0x1F; + break; + case 3: + wchar = ch & 0x0F; + break; + case 4: + wchar = ch & 0x07; + break; + } + + for (short i = 1; i < ch_len; i++) { + wchar = (wchar << 6) | (search->ptr[i] & 0x3F); + } + + if (wchar <= 0xFFFF) { + scratch[2] = hexdig[wchar >> 12]; + scratch[3] = hexdig[(wchar >> 8) & 0xf]; + scratch[4] = hexdig[(wchar >> 4) & 0xf]; + scratch[5] = hexdig[wchar & 0xf]; + fbuffer_append(search->buffer, scratch, 6); + } else { + uint16_t hi, lo; + wchar -= 0x10000; + hi = 0xD800 + (uint16_t)(wchar >> 10); + lo = 0xDC00 + (uint16_t)(wchar & 0x3FF); + + scratch[2] = hexdig[hi >> 12]; + scratch[3] = hexdig[(hi >> 8) & 0xf]; + scratch[4] = hexdig[(hi >> 4) & 0xf]; + scratch[5] = hexdig[hi & 0xf]; + + scratch[8] = hexdig[lo >> 12]; + scratch[9] = hexdig[(lo >> 8) & 0xf]; + scratch[10] = hexdig[(lo >> 4) & 0xf]; + scratch[11] = hexdig[lo & 0xf]; + + fbuffer_append(search->buffer, scratch, 12); + } + + break; + } + } + search->cursor = (search->ptr += ch_len); +} + +static void convert_UTF8_to_ASCII_only_JSON(search_state *search, const unsigned char escape_table[256]) +{ + unsigned char ch_len; + while ((ch_len = search_ascii_only_escape(search, escape_table))) { + full_escape_UTF8_char(search, ch_len); + } +} + +/* + * Document-module: JSON::Ext::Generator + * + * This is the JSON generator implemented as a C extension. It can be + * configured to be used by setting + * + * JSON.generator = JSON::Ext::Generator + * + * with the method generator= in JSON. 
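+ *
+ * A minimal usage sketch (assuming the extension has been built and can be
+ * required as usual):
+ *
+ *   require 'json'
+ *   JSON.generator = JSON::Ext::Generator
+ *   JSON.generate({"a" => [1, 2.5, nil]}) # => "{\"a\":[1,2.5,null]}"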
+ * + */ + +/* Explanation of the following: that's the only way to not pollute + * standard library's docs with GeneratorMethods:: which + * are uninformative and take a large place in a list of classes + */ + +/* + * Document-module: JSON::Ext::Generator::GeneratorMethods + * :nodoc: + */ + +/* + * Document-module: JSON::Ext::Generator::GeneratorMethods::Array + * :nodoc: + */ + +/* + * Document-module: JSON::Ext::Generator::GeneratorMethods::Bignum + * :nodoc: + */ + +/* + * Document-module: JSON::Ext::Generator::GeneratorMethods::FalseClass + * :nodoc: + */ + +/* + * Document-module: JSON::Ext::Generator::GeneratorMethods::Fixnum + * :nodoc: + */ + +/* + * Document-module: JSON::Ext::Generator::GeneratorMethods::Float + * :nodoc: + */ + +/* + * Document-module: JSON::Ext::Generator::GeneratorMethods::Hash + * :nodoc: + */ + +/* + * Document-module: JSON::Ext::Generator::GeneratorMethods::Integer + * :nodoc: + */ + +/* + * Document-module: JSON::Ext::Generator::GeneratorMethods::NilClass + * :nodoc: + */ + +/* + * Document-module: JSON::Ext::Generator::GeneratorMethods::Object + * :nodoc: + */ + +/* + * Document-module: JSON::Ext::Generator::GeneratorMethods::String + * :nodoc: + */ + +/* + * Document-module: JSON::Ext::Generator::GeneratorMethods::String::Extend + * :nodoc: + */ + +/* + * Document-module: JSON::Ext::Generator::GeneratorMethods::TrueClass + * :nodoc: + */ + +/* + * call-seq: to_json(state = nil) + * + * Returns a JSON string containing a JSON object, that is generated from + * this Hash instance. + * _state_ is a JSON::State object, that can also be used to configure the + * produced JSON string output further. + */ +static VALUE mHash_to_json(int argc, VALUE *argv, VALUE self) +{ + rb_check_arity(argc, 0, 1); + VALUE Vstate = cState_from_state_s(cState, argc == 1 ? argv[0] : Qnil); + return cState_partial_generate(Vstate, self, generate_json_object, Qfalse); +} + +/* + * call-seq: to_json(state = nil) + * + * Returns a JSON string containing a JSON array, that is generated from + * this Array instance. + * _state_ is a JSON::State object, that can also be used to configure the + * produced JSON string output further. + */ +static VALUE mArray_to_json(int argc, VALUE *argv, VALUE self) +{ + rb_check_arity(argc, 0, 1); + VALUE Vstate = cState_from_state_s(cState, argc == 1 ? argv[0] : Qnil); + return cState_partial_generate(Vstate, self, generate_json_array, Qfalse); +} + +#ifdef RUBY_INTEGER_UNIFICATION +/* + * call-seq: to_json(*) + * + * Returns a JSON string representation for this Integer number. + */ +static VALUE mInteger_to_json(int argc, VALUE *argv, VALUE self) +{ + rb_check_arity(argc, 0, 1); + VALUE Vstate = cState_from_state_s(cState, argc == 1 ? argv[0] : Qnil); + return cState_partial_generate(Vstate, self, generate_json_integer, Qfalse); +} + +#else +/* + * call-seq: to_json(*) + * + * Returns a JSON string representation for this Integer number. + */ +static VALUE mFixnum_to_json(int argc, VALUE *argv, VALUE self) +{ + rb_check_arity(argc, 0, 1); + VALUE Vstate = cState_from_state_s(cState, argc == 1 ? argv[0] : Qnil); + return cState_partial_generate(Vstate, self, generate_json_fixnum, Qfalse); +} + +/* + * call-seq: to_json(*) + * + * Returns a JSON string representation for this Integer number. + */ +static VALUE mBignum_to_json(int argc, VALUE *argv, VALUE self) +{ + rb_check_arity(argc, 0, 1); + VALUE Vstate = cState_from_state_s(cState, argc == 1 ? 
argv[0] : Qnil); + return cState_partial_generate(Vstate, self, generate_json_bignum, Qfalse); +} +#endif + +/* + * call-seq: to_json(*) + * + * Returns a JSON string representation for this Float number. + */ +static VALUE mFloat_to_json(int argc, VALUE *argv, VALUE self) +{ + rb_check_arity(argc, 0, 1); + VALUE Vstate = cState_from_state_s(cState, argc == 1 ? argv[0] : Qnil); + return cState_partial_generate(Vstate, self, generate_json_float, Qfalse); +} + +/* + * call-seq: String.included(modul) + * + * Extends _modul_ with the String::Extend module. + */ +static VALUE mString_included_s(VALUE self, VALUE modul) +{ + VALUE result = rb_funcall(modul, i_extend, 1, mString_Extend); + rb_call_super(1, &modul); + return result; +} + +/* + * call-seq: to_json(*) + * + * This string should be encoded with UTF-8 A call to this method + * returns a JSON string encoded with UTF16 big endian characters as + * \u????. + */ +static VALUE mString_to_json(int argc, VALUE *argv, VALUE self) +{ + rb_check_arity(argc, 0, 1); + VALUE Vstate = cState_from_state_s(cState, argc == 1 ? argv[0] : Qnil); + return cState_partial_generate(Vstate, self, generate_json_string, Qfalse); +} + +/* + * call-seq: to_json_raw_object() + * + * This method creates a raw object hash, that can be nested into + * other data structures and will be generated as a raw string. This + * method should be used, if you want to convert raw strings to JSON + * instead of UTF-8 strings, e. g. binary data. + */ +static VALUE mString_to_json_raw_object(VALUE self) +{ + VALUE ary; + VALUE result = rb_hash_new(); + rb_hash_aset(result, rb_funcall(mJSON, i_create_id, 0), rb_class_name(rb_obj_class(self))); + ary = rb_funcall(self, i_unpack, 1, rb_str_new2("C*")); + rb_hash_aset(result, rb_utf8_str_new_lit("raw"), ary); + return result; +} + +/* + * call-seq: to_json_raw(*args) + * + * This method creates a JSON text from the result of a call to + * to_json_raw_object of this String. + */ +static VALUE mString_to_json_raw(int argc, VALUE *argv, VALUE self) +{ + VALUE obj = mString_to_json_raw_object(self); + Check_Type(obj, T_HASH); + return mHash_to_json(argc, argv, obj); +} + +/* + * call-seq: json_create(o) + * + * Raw Strings are JSON Objects (the raw bytes are stored in an array for the + * key "raw"). The Ruby String can be created by this module method. + */ +static VALUE mString_Extend_json_create(VALUE self, VALUE o) +{ + VALUE ary; + Check_Type(o, T_HASH); + ary = rb_hash_aref(o, rb_str_new2("raw")); + return rb_funcall(ary, i_pack, 1, rb_str_new2("C*")); +} + +/* + * call-seq: to_json(*) + * + * Returns a JSON string for true: 'true'. + */ +static VALUE mTrueClass_to_json(int argc, VALUE *argv, VALUE self) +{ + rb_check_arity(argc, 0, 1); + return rb_utf8_str_new("true", 4); +} + +/* + * call-seq: to_json(*) + * + * Returns a JSON string for false: 'false'. + */ +static VALUE mFalseClass_to_json(int argc, VALUE *argv, VALUE self) +{ + rb_check_arity(argc, 0, 1); + return rb_utf8_str_new("false", 5); +} + +/* + * call-seq: to_json(*) + * + * Returns a JSON string for nil: 'null'. + */ +static VALUE mNilClass_to_json(int argc, VALUE *argv, VALUE self) +{ + rb_check_arity(argc, 0, 1); + return rb_utf8_str_new("null", 4); +} + +/* + * call-seq: to_json(*) + * + * Converts this object to a string (calling #to_s), converts + * it to a JSON string, and returns the result. This is a fallback, if no + * special method #to_json was defined for some object. 
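+ *
+ * For example (object address abbreviated):
+ *
+ *   require 'json'
+ *   Object.new.to_json # => "\"#<Object:0x...>\""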
+ */ +static VALUE mObject_to_json(int argc, VALUE *argv, VALUE self) +{ + VALUE state; + VALUE string = rb_funcall(self, i_to_s, 0); + rb_scan_args(argc, argv, "01", &state); + Check_Type(string, T_STRING); + state = cState_from_state_s(cState, state); + return cState_partial_generate(state, string, generate_json_string, Qfalse); +} + +static void State_mark(void *ptr) +{ + JSON_Generator_State *state = ptr; + rb_gc_mark_movable(state->indent); + rb_gc_mark_movable(state->space); + rb_gc_mark_movable(state->space_before); + rb_gc_mark_movable(state->object_nl); + rb_gc_mark_movable(state->array_nl); + rb_gc_mark_movable(state->as_json); +} + +static void State_compact(void *ptr) +{ + JSON_Generator_State *state = ptr; + state->indent = rb_gc_location(state->indent); + state->space = rb_gc_location(state->space); + state->space_before = rb_gc_location(state->space_before); + state->object_nl = rb_gc_location(state->object_nl); + state->array_nl = rb_gc_location(state->array_nl); + state->as_json = rb_gc_location(state->as_json); +} + +static void State_free(void *ptr) +{ + JSON_Generator_State *state = ptr; + ruby_xfree(state); +} + +static size_t State_memsize(const void *ptr) +{ + return sizeof(JSON_Generator_State); +} + +#ifndef HAVE_RB_EXT_RACTOR_SAFE +# undef RUBY_TYPED_FROZEN_SHAREABLE +# define RUBY_TYPED_FROZEN_SHAREABLE 0 +#endif + +static const rb_data_type_t JSON_Generator_State_type = { + "JSON/Generator/State", + { + .dmark = State_mark, + .dfree = State_free, + .dsize = State_memsize, + .dcompact = State_compact, + }, + 0, 0, + RUBY_TYPED_WB_PROTECTED | RUBY_TYPED_FREE_IMMEDIATELY | RUBY_TYPED_FROZEN_SHAREABLE, +}; + +static void state_init(JSON_Generator_State *state) +{ + state->max_nesting = 100; + state->buffer_initial_length = FBUFFER_INITIAL_LENGTH_DEFAULT; +} + +static VALUE cState_s_allocate(VALUE klass) +{ + JSON_Generator_State *state; + VALUE obj = TypedData_Make_Struct(klass, JSON_Generator_State, &JSON_Generator_State_type, state); + state_init(state); + return obj; +} + +static void vstate_spill(struct generate_json_data *data) +{ + VALUE vstate = cState_s_allocate(cState); + GET_STATE(vstate); + MEMCPY(state, data->state, JSON_Generator_State, 1); + data->state = state; + data->vstate = vstate; + RB_OBJ_WRITTEN(vstate, Qundef, state->indent); + RB_OBJ_WRITTEN(vstate, Qundef, state->space); + RB_OBJ_WRITTEN(vstate, Qundef, state->space_before); + RB_OBJ_WRITTEN(vstate, Qundef, state->object_nl); + RB_OBJ_WRITTEN(vstate, Qundef, state->array_nl); + RB_OBJ_WRITTEN(vstate, Qundef, state->as_json); +} + +static inline VALUE vstate_get(struct generate_json_data *data) +{ + if (RB_UNLIKELY(!data->vstate)) { + vstate_spill(data); + } + return data->vstate; +} + +struct hash_foreach_arg { + struct generate_json_data *data; + int iter; +}; + +static VALUE +convert_string_subclass(VALUE key) +{ + VALUE key_to_s = rb_funcall(key, i_to_s, 0); + + if (RB_UNLIKELY(!RB_TYPE_P(key_to_s, T_STRING))) { + VALUE cname = rb_obj_class(key); + rb_raise(rb_eTypeError, + "can't convert %"PRIsVALUE" to %s (%"PRIsVALUE"#%s gives %"PRIsVALUE")", + cname, "String", cname, "to_s", rb_obj_class(key_to_s)); + } + + return key_to_s; +} + +static int +json_object_i(VALUE key, VALUE val, VALUE _arg) +{ + struct hash_foreach_arg *arg = (struct hash_foreach_arg *)_arg; + struct generate_json_data *data = arg->data; + + FBuffer *buffer = data->buffer; + JSON_Generator_State *state = data->state; + + long depth = state->depth; + int j; + + if (arg->iter > 0) fbuffer_append_char(buffer, ','); + if 
(RB_UNLIKELY(data->state->object_nl)) { + fbuffer_append_str(buffer, data->state->object_nl); + } + if (RB_UNLIKELY(data->state->indent)) { + for (j = 0; j < depth; j++) { + fbuffer_append_str(buffer, data->state->indent); + } + } + + VALUE key_to_s; + switch (rb_type(key)) { + case T_STRING: + if (RB_LIKELY(RBASIC_CLASS(key) == rb_cString)) { + key_to_s = key; + } else { + key_to_s = convert_string_subclass(key); + } + break; + case T_SYMBOL: + key_to_s = rb_sym2str(key); + break; + default: + key_to_s = rb_convert_type(key, T_STRING, "String", "to_s"); + break; + } + + if (RB_LIKELY(RBASIC_CLASS(key_to_s) == rb_cString)) { + generate_json_string(buffer, data, key_to_s); + } else { + generate_json(buffer, data, key_to_s); + } + if (RB_UNLIKELY(state->space_before)) fbuffer_append_str(buffer, data->state->space_before); + fbuffer_append_char(buffer, ':'); + if (RB_UNLIKELY(state->space)) fbuffer_append_str(buffer, data->state->space); + generate_json(buffer, data, val); + + arg->iter++; + return ST_CONTINUE; +} + +static inline long increase_depth(struct generate_json_data *data) +{ + JSON_Generator_State *state = data->state; + long depth = ++state->depth; + if (RB_UNLIKELY(depth > state->max_nesting && state->max_nesting)) { + rb_raise(eNestingError, "nesting of %ld is too deep", --state->depth); + } + return depth; +} + +static void generate_json_object(FBuffer *buffer, struct generate_json_data *data, VALUE obj) +{ + int j; + long depth = increase_depth(data); + + if (RHASH_SIZE(obj) == 0) { + fbuffer_append(buffer, "{}", 2); + --data->state->depth; + return; + } + + fbuffer_append_char(buffer, '{'); + + struct hash_foreach_arg arg = { + .data = data, + .iter = 0, + }; + rb_hash_foreach(obj, json_object_i, (VALUE)&arg); + + depth = --data->state->depth; + if (RB_UNLIKELY(data->state->object_nl)) { + fbuffer_append_str(buffer, data->state->object_nl); + if (RB_UNLIKELY(data->state->indent)) { + for (j = 0; j < depth; j++) { + fbuffer_append_str(buffer, data->state->indent); + } + } + } + fbuffer_append_char(buffer, '}'); +} + +static void generate_json_array(FBuffer *buffer, struct generate_json_data *data, VALUE obj) +{ + int i, j; + long depth = increase_depth(data); + + if (RARRAY_LEN(obj) == 0) { + fbuffer_append(buffer, "[]", 2); + --data->state->depth; + return; + } + + fbuffer_append_char(buffer, '['); + if (RB_UNLIKELY(data->state->array_nl)) fbuffer_append_str(buffer, data->state->array_nl); + for (i = 0; i < RARRAY_LEN(obj); i++) { + if (i > 0) { + fbuffer_append_char(buffer, ','); + if (RB_UNLIKELY(data->state->array_nl)) fbuffer_append_str(buffer, data->state->array_nl); + } + if (RB_UNLIKELY(data->state->indent)) { + for (j = 0; j < depth; j++) { + fbuffer_append_str(buffer, data->state->indent); + } + } + generate_json(buffer, data, RARRAY_AREF(obj, i)); + } + data->state->depth = --depth; + if (RB_UNLIKELY(data->state->array_nl)) { + fbuffer_append_str(buffer, data->state->array_nl); + if (RB_UNLIKELY(data->state->indent)) { + for (j = 0; j < depth; j++) { + fbuffer_append_str(buffer, data->state->indent); + } + } + } + fbuffer_append_char(buffer, ']'); +} + +static inline int enc_utf8_compatible_p(int enc_idx) +{ + if (enc_idx == usascii_encindex) return 1; + if (enc_idx == utf8_encindex) return 1; + return 0; +} + +static VALUE encode_json_string_try(VALUE str) +{ + return rb_funcall(str, i_encode, 1, Encoding_UTF_8); +} + +static VALUE encode_json_string_rescue(VALUE str, VALUE exception) +{ + raise_generator_error_str(str, rb_funcall(exception, rb_intern("message"), 
0)); + return Qundef; +} + +static inline VALUE ensure_valid_encoding(VALUE str) +{ + int encindex = RB_ENCODING_GET(str); + VALUE utf8_string; + if (RB_UNLIKELY(!enc_utf8_compatible_p(encindex))) { + if (encindex == binary_encindex) { + utf8_string = rb_enc_associate_index(rb_str_dup(str), utf8_encindex); + switch (rb_enc_str_coderange(utf8_string)) { + case ENC_CODERANGE_7BIT: + return utf8_string; + case ENC_CODERANGE_VALID: + // For historical reason, we silently reinterpret binary strings as UTF-8 if it would work. + // TODO: Raise in 3.0.0 + rb_warn("JSON.generate: UTF-8 string passed as BINARY, this will raise an encoding error in json 3.0"); + return utf8_string; + break; + } + } + + str = rb_rescue(encode_json_string_try, str, encode_json_string_rescue, str); + } + return str; +} + +static void generate_json_string(FBuffer *buffer, struct generate_json_data *data, VALUE obj) +{ + obj = ensure_valid_encoding(obj); + + fbuffer_append_char(buffer, '"'); + + long len; + search_state search; + search.buffer = buffer; + RSTRING_GETMEM(obj, search.ptr, len); + search.cursor = search.ptr; + search.end = search.ptr + len; + +#ifdef HAVE_SIMD + search.matches_mask = 0; + search.has_matches = false; + search.chunk_base = NULL; +#endif /* HAVE_SIMD */ + + switch (rb_enc_str_coderange(obj)) { + case ENC_CODERANGE_7BIT: + case ENC_CODERANGE_VALID: + if (RB_UNLIKELY(data->state->ascii_only)) { + convert_UTF8_to_ASCII_only_JSON(&search, data->state->script_safe ? script_safe_escape_table : ascii_only_escape_table); + } else if (RB_UNLIKELY(data->state->script_safe)) { + convert_UTF8_to_script_safe_JSON(&search); + } else { + convert_UTF8_to_JSON(&search); + } + break; + default: + raise_generator_error(obj, "source sequence is illegal/malformed utf-8"); + break; + } + fbuffer_append_char(buffer, '"'); +} + +static void generate_json_fallback(FBuffer *buffer, struct generate_json_data *data, VALUE obj) +{ + VALUE tmp; + if (rb_respond_to(obj, i_to_json)) { + tmp = rb_funcall(obj, i_to_json, 1, vstate_get(data)); + Check_Type(tmp, T_STRING); + fbuffer_append_str(buffer, tmp); + } else { + tmp = rb_funcall(obj, i_to_s, 0); + Check_Type(tmp, T_STRING); + generate_json_string(buffer, data, tmp); + } +} + +static inline void generate_json_symbol(FBuffer *buffer, struct generate_json_data *data, VALUE obj) +{ + if (data->state->strict) { + generate_json_string(buffer, data, rb_sym2str(obj)); + } else { + generate_json_fallback(buffer, data, obj); + } +} + +static void generate_json_null(FBuffer *buffer, struct generate_json_data *data, VALUE obj) +{ + fbuffer_append(buffer, "null", 4); +} + +static void generate_json_false(FBuffer *buffer, struct generate_json_data *data, VALUE obj) +{ + fbuffer_append(buffer, "false", 5); +} + +static void generate_json_true(FBuffer *buffer, struct generate_json_data *data, VALUE obj) +{ + fbuffer_append(buffer, "true", 4); +} + +static void generate_json_fixnum(FBuffer *buffer, struct generate_json_data *data, VALUE obj) +{ + fbuffer_append_long(buffer, FIX2LONG(obj)); +} + +static void generate_json_bignum(FBuffer *buffer, struct generate_json_data *data, VALUE obj) +{ + VALUE tmp = rb_funcall(obj, i_to_s, 0); + fbuffer_append_str(buffer, tmp); +} + +#ifdef RUBY_INTEGER_UNIFICATION +static void generate_json_integer(FBuffer *buffer, struct generate_json_data *data, VALUE obj) +{ + if (FIXNUM_P(obj)) + generate_json_fixnum(buffer, data, obj); + else + generate_json_bignum(buffer, data, obj); +} +#endif + +static void generate_json_float(FBuffer *buffer, struct 
generate_json_data *data, VALUE obj) +{ + double value = RFLOAT_VALUE(obj); + char allow_nan = data->state->allow_nan; + if (isinf(value) || isnan(value)) { + /* for NaN and Infinity values we either raise an error or rely on Float#to_s. */ + if (!allow_nan) { + if (data->state->strict && data->state->as_json) { + VALUE casted_obj = rb_proc_call_with_block(data->state->as_json, 1, &obj, Qnil); + if (casted_obj != obj) { + increase_depth(data); + generate_json(buffer, data, casted_obj); + data->state->depth--; + return; + } + } + raise_generator_error(obj, "%"PRIsVALUE" not allowed in JSON", rb_funcall(obj, i_to_s, 0)); + } + + VALUE tmp = rb_funcall(obj, i_to_s, 0); + fbuffer_append_str(buffer, tmp); + return; + } + + /* This implementation writes directly into the buffer. We reserve + * the 28 characters that fpconv_dtoa states as its maximum. + */ + fbuffer_inc_capa(buffer, 28); + char* d = buffer->ptr + buffer->len; + int len = fpconv_dtoa(value, d); + + /* fpconv_dtoa converts a float to its shortest string representation, + * but it adds a ".0" if this is a plain integer. + */ + fbuffer_consumed(buffer, len); +} + +static void generate_json_fragment(FBuffer *buffer, struct generate_json_data *data, VALUE obj) +{ + VALUE fragment = RSTRUCT_GET(obj, 0); + Check_Type(fragment, T_STRING); + fbuffer_append_str(buffer, fragment); +} + +static void generate_json(FBuffer *buffer, struct generate_json_data *data, VALUE obj) +{ + bool as_json_called = false; +start: + if (obj == Qnil) { + generate_json_null(buffer, data, obj); + } else if (obj == Qfalse) { + generate_json_false(buffer, data, obj); + } else if (obj == Qtrue) { + generate_json_true(buffer, data, obj); + } else if (RB_SPECIAL_CONST_P(obj)) { + if (RB_FIXNUM_P(obj)) { + generate_json_fixnum(buffer, data, obj); + } else if (RB_FLONUM_P(obj)) { + generate_json_float(buffer, data, obj); + } else if (RB_STATIC_SYM_P(obj)) { + generate_json_symbol(buffer, data, obj); + } else { + goto general; + } + } else { + VALUE klass = RBASIC_CLASS(obj); + switch (RB_BUILTIN_TYPE(obj)) { + case T_BIGNUM: + generate_json_bignum(buffer, data, obj); + break; + case T_HASH: + if (klass != rb_cHash) goto general; + generate_json_object(buffer, data, obj); + break; + case T_ARRAY: + if (klass != rb_cArray) goto general; + generate_json_array(buffer, data, obj); + break; + case T_STRING: + if (klass != rb_cString) goto general; + generate_json_string(buffer, data, obj); + break; + case T_SYMBOL: + generate_json_symbol(buffer, data, obj); + break; + case T_FLOAT: + if (klass != rb_cFloat) goto general; + generate_json_float(buffer, data, obj); + break; + case T_STRUCT: + if (klass != cFragment) goto general; + generate_json_fragment(buffer, data, obj); + break; + default: + general: + if (data->state->strict) { + if (RTEST(data->state->as_json) && !as_json_called) { + obj = rb_proc_call_with_block(data->state->as_json, 1, &obj, Qnil); + as_json_called = true; + goto start; + } else { + raise_generator_error(obj, "%"PRIsVALUE" not allowed in JSON", CLASS_OF(obj)); + } + } else { + generate_json_fallback(buffer, data, obj); + } + } + } +} + +static VALUE generate_json_try(VALUE d) +{ + struct generate_json_data *data = (struct generate_json_data *)d; + + data->func(data->buffer, data, data->obj); + + return Qnil; +} + +static VALUE generate_json_rescue(VALUE d, VALUE exc) +{ + struct generate_json_data *data = (struct generate_json_data *)d; + fbuffer_free(data->buffer); + + rb_exc_raise(exc); + + return Qundef; +} + +static VALUE 
cState_partial_generate(VALUE self, VALUE obj, generator_func func, VALUE io) +{ + GET_STATE(self); + + char stack_buffer[FBUFFER_STACK_SIZE]; + FBuffer buffer = { + .io = RTEST(io) ? io : Qfalse, + }; + fbuffer_stack_init(&buffer, state->buffer_initial_length, stack_buffer, FBUFFER_STACK_SIZE); + + struct generate_json_data data = { + .buffer = &buffer, + .vstate = self, + .state = state, + .obj = obj, + .func = func + }; + rb_rescue(generate_json_try, (VALUE)&data, generate_json_rescue, (VALUE)&data); + + return fbuffer_finalize(&buffer); +} + +/* call-seq: + * generate(obj) -> String + * generate(obj, anIO) -> anIO + * + * Generates a valid JSON document from object +obj+ and returns the + * result. If no valid JSON document can be created this method raises a + * GeneratorError exception. + */ +static VALUE cState_generate(int argc, VALUE *argv, VALUE self) +{ + rb_check_arity(argc, 1, 2); + VALUE obj = argv[0]; + VALUE io = argc > 1 ? argv[1] : Qnil; + VALUE result = cState_partial_generate(self, obj, generate_json, io); + GET_STATE(self); + (void)state; + return result; +} + +static VALUE cState_initialize(int argc, VALUE *argv, VALUE self) +{ + rb_warn("The json gem extension was loaded with the stdlib ruby code. You should upgrade rubygems with `gem update --system`"); + return self; +} + +/* + * call-seq: initialize_copy(orig) + * + * Initializes this object from orig if it can be duplicated/cloned and returns + * it. +*/ +static VALUE cState_init_copy(VALUE obj, VALUE orig) +{ + JSON_Generator_State *objState, *origState; + + if (obj == orig) return obj; + GET_STATE_TO(obj, objState); + GET_STATE_TO(orig, origState); + if (!objState) rb_raise(rb_eArgError, "unallocated JSON::State"); + + MEMCPY(objState, origState, JSON_Generator_State, 1); + objState->indent = origState->indent; + objState->space = origState->space; + objState->space_before = origState->space_before; + objState->object_nl = origState->object_nl; + objState->array_nl = origState->array_nl; + objState->as_json = origState->as_json; + return obj; +} + +/* + * call-seq: from_state(opts) + * + * Creates a State object from _opts_, which ought to be Hash to create a + * new State instance configured by _opts_, something else to create an + * unconfigured instance. If _opts_ is a State object, it is just returned. + */ +static VALUE cState_from_state_s(VALUE self, VALUE opts) +{ + if (rb_obj_is_kind_of(opts, self)) { + return opts; + } else if (rb_obj_is_kind_of(opts, rb_cHash)) { + return rb_funcall(self, i_new, 1, opts); + } else { + return rb_class_new_instance(0, NULL, cState); + } +} + +/* + * call-seq: indent() + * + * Returns the string that is used to indent levels in the JSON text. + */ +static VALUE cState_indent(VALUE self) +{ + GET_STATE(self); + return state->indent ? state->indent : rb_str_freeze(rb_utf8_str_new("", 0)); +} + +static VALUE string_config(VALUE config) +{ + if (RTEST(config)) { + Check_Type(config, T_STRING); + if (RSTRING_LEN(config)) { + return rb_str_new_frozen(config); + } + } + return Qfalse; +} + +/* + * call-seq: indent=(indent) + * + * Sets the string that is used to indent levels in the JSON text. + */ +static VALUE cState_indent_set(VALUE self, VALUE indent) +{ + GET_STATE(self); + RB_OBJ_WRITE(self, &state->indent, string_config(indent)); + return Qnil; +} + +/* + * call-seq: space() + * + * Returns the string that is used to insert a space between the tokens in a JSON + * string. + */ +static VALUE cState_space(VALUE self) +{ + GET_STATE(self); + return state->space ? 
state->space : rb_str_freeze(rb_utf8_str_new("", 0)); +} + +/* + * call-seq: space=(space) + * + * Sets _space_ to the string that is used to insert a space between the tokens in a JSON + * string. + */ +static VALUE cState_space_set(VALUE self, VALUE space) +{ + GET_STATE(self); + RB_OBJ_WRITE(self, &state->space, string_config(space)); + return Qnil; +} + +/* + * call-seq: space_before() + * + * Returns the string that is used to insert a space before the ':' in JSON objects. + */ +static VALUE cState_space_before(VALUE self) +{ + GET_STATE(self); + return state->space_before ? state->space_before : rb_str_freeze(rb_utf8_str_new("", 0)); +} + +/* + * call-seq: space_before=(space_before) + * + * Sets the string that is used to insert a space before the ':' in JSON objects. + */ +static VALUE cState_space_before_set(VALUE self, VALUE space_before) +{ + GET_STATE(self); + RB_OBJ_WRITE(self, &state->space_before, string_config(space_before)); + return Qnil; +} + +/* + * call-seq: object_nl() + * + * This string is put at the end of a line that holds a JSON object (or + * Hash). + */ +static VALUE cState_object_nl(VALUE self) +{ + GET_STATE(self); + return state->object_nl ? state->object_nl : rb_str_freeze(rb_utf8_str_new("", 0)); +} + +/* + * call-seq: object_nl=(object_nl) + * + * This string is put at the end of a line that holds a JSON object (or + * Hash). + */ +static VALUE cState_object_nl_set(VALUE self, VALUE object_nl) +{ + GET_STATE(self); + RB_OBJ_WRITE(self, &state->object_nl, string_config(object_nl)); + return Qnil; +} + +/* + * call-seq: array_nl() + * + * This string is put at the end of a line that holds a JSON array. + */ +static VALUE cState_array_nl(VALUE self) +{ + GET_STATE(self); + return state->array_nl ? state->array_nl : rb_str_freeze(rb_utf8_str_new("", 0)); +} + +/* + * call-seq: array_nl=(array_nl) + * + * This string is put at the end of a line that holds a JSON array. + */ +static VALUE cState_array_nl_set(VALUE self, VALUE array_nl) +{ + GET_STATE(self); + RB_OBJ_WRITE(self, &state->array_nl, string_config(array_nl)); + return Qnil; +} + +/* + * call-seq: as_json() + * + * Returns the proc that is used to convert otherwise unsupported objects when + * +strict+ mode is enabled, or +false+ if none is set. + */ +static VALUE cState_as_json(VALUE self) +{ + GET_STATE(self); + return state->as_json; +} + +/* + * call-seq: as_json=(as_json) + * + * Sets the proc that is used to convert otherwise unsupported objects when + * +strict+ mode is enabled. The given object is converted to a Proc via #to_proc. + */ +static VALUE cState_as_json_set(VALUE self, VALUE as_json) +{ + GET_STATE(self); + RB_OBJ_WRITE(self, &state->as_json, rb_convert_type(as_json, T_DATA, "Proc", "to_proc")); + return Qnil; +} + +/* +* call-seq: check_circular? +* +* Returns true, if circular data structures should be checked, +* otherwise returns false. +*/ +static VALUE cState_check_circular_p(VALUE self) +{ + GET_STATE(self); + return state->max_nesting ? Qtrue : Qfalse; +} + +/* + * call-seq: max_nesting + * + * This integer returns the maximum level of data structure nesting in + * the generated JSON, max_nesting = 0 if no maximum is checked. + */ +static VALUE cState_max_nesting(VALUE self) +{ + GET_STATE(self); + return LONG2FIX(state->max_nesting); +} + +static long long_config(VALUE num) +{ + return RTEST(num) ? FIX2LONG(num) : 0; +} + +/* + * call-seq: max_nesting=(depth) + * + * This sets the maximum level of data structure nesting in the generated JSON + * to the integer depth, max_nesting = 0 if no maximum should be checked.
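+ *
+ * An illustrative sketch:
+ *
+ *   state = JSON::State.new
+ *   state.max_nesting = 2
+ *   state.generate([[1]])   # fine, nesting depth is 2
+ *   state.generate([[[1]]]) # raises JSON::NestingError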
+ */ +static VALUE cState_max_nesting_set(VALUE self, VALUE depth) +{ + GET_STATE(self); + state->max_nesting = long_config(depth); + return Qnil; +} + +/* + * call-seq: script_safe + * + * If this boolean is true, the forward slashes will be escaped in + * the json output. + */ +static VALUE cState_script_safe(VALUE self) +{ + GET_STATE(self); + return state->script_safe ? Qtrue : Qfalse; +} + +/* + * call-seq: script_safe=(enable) + * + * This sets whether or not the forward slashes will be escaped in + * the json output. + */ +static VALUE cState_script_safe_set(VALUE self, VALUE enable) +{ + GET_STATE(self); + state->script_safe = RTEST(enable); + return Qnil; +} + +/* + * call-seq: strict + * + * If this boolean is false, types unsupported by the JSON format will + * be serialized as strings. + * If this boolean is true, types unsupported by the JSON format will + * raise a JSON::GeneratorError. + */ +static VALUE cState_strict(VALUE self) +{ + GET_STATE(self); + return state->strict ? Qtrue : Qfalse; +} + +/* + * call-seq: strict=(enable) + * + * This sets whether or not to serialize types unsupported by the + * JSON format as strings. + * If this boolean is false, types unsupported by the JSON format will + * be serialized as strings. + * If this boolean is true, types unsupported by the JSON format will + * raise a JSON::GeneratorError. + */ +static VALUE cState_strict_set(VALUE self, VALUE enable) +{ + GET_STATE(self); + state->strict = RTEST(enable); + return Qnil; +} + +/* + * call-seq: allow_nan? + * + * Returns true, if NaN, Infinity, and -Infinity should be generated, otherwise + * returns false. + */ +static VALUE cState_allow_nan_p(VALUE self) +{ + GET_STATE(self); + return state->allow_nan ? Qtrue : Qfalse; +} + +/* + * call-seq: allow_nan=(enable) + * + * This sets whether or not to serialize NaN, Infinity, and -Infinity + */ +static VALUE cState_allow_nan_set(VALUE self, VALUE enable) +{ + GET_STATE(self); + state->allow_nan = RTEST(enable); + return Qnil; +} + +/* + * call-seq: ascii_only? + * + * Returns true, if only ASCII characters should be generated. Otherwise + * returns false. + */ +static VALUE cState_ascii_only_p(VALUE self) +{ + GET_STATE(self); + return state->ascii_only ? Qtrue : Qfalse; +} + +/* + * call-seq: ascii_only=(enable) + * + * This sets whether only ASCII characters should be generated. + */ +static VALUE cState_ascii_only_set(VALUE self, VALUE enable) +{ + GET_STATE(self); + state->ascii_only = RTEST(enable); + return Qnil; +} + +/* + * call-seq: depth + * + * This integer returns the current depth of data structure nesting. + */ +static VALUE cState_depth(VALUE self) +{ + GET_STATE(self); + return LONG2FIX(state->depth); +} + +/* + * call-seq: depth=(depth) + * + * This sets the maximum level of data structure nesting in the generated JSON + * to the integer depth, max_nesting = 0 if no maximum should be checked. + */ +static VALUE cState_depth_set(VALUE self, VALUE depth) +{ + GET_STATE(self); + state->depth = long_config(depth); + return Qnil; +} + +/* + * call-seq: buffer_initial_length + * + * This integer returns the current initial length of the buffer. 
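+ *
+ * For example:
+ *
+ *   JSON::State.new(buffer_initial_length: 4096).buffer_initial_length # => 4096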
+ */ +static VALUE cState_buffer_initial_length(VALUE self) +{ + GET_STATE(self); + return LONG2FIX(state->buffer_initial_length); +} + +static void buffer_initial_length_set(JSON_Generator_State *state, VALUE buffer_initial_length) +{ + Check_Type(buffer_initial_length, T_FIXNUM); + long initial_length = FIX2LONG(buffer_initial_length); + if (initial_length > 0) { + state->buffer_initial_length = initial_length; + } +} + +/* + * call-seq: buffer_initial_length=(length) + * + * This sets the initial length of the buffer to +length+, if +length+ > 0, + * otherwise its value isn't changed. + */ +static VALUE cState_buffer_initial_length_set(VALUE self, VALUE buffer_initial_length) +{ + GET_STATE(self); + buffer_initial_length_set(state, buffer_initial_length); + return Qnil; +} + +struct configure_state_data { + JSON_Generator_State *state; + VALUE vstate; // Ruby object that owns the state, or Qfalse if stack-allocated +}; + +static inline void state_write_value(struct configure_state_data *data, VALUE *field, VALUE value) +{ + if (RTEST(data->vstate)) { + RB_OBJ_WRITE(data->vstate, field, value); + } else { + *field = value; + } +} + +static int configure_state_i(VALUE key, VALUE val, VALUE _arg) +{ + struct configure_state_data *data = (struct configure_state_data *)_arg; + JSON_Generator_State *state = data->state; + + if (key == sym_indent) { state_write_value(data, &state->indent, string_config(val)); } + else if (key == sym_space) { state_write_value(data, &state->space, string_config(val)); } + else if (key == sym_space_before) { state_write_value(data, &state->space_before, string_config(val)); } + else if (key == sym_object_nl) { state_write_value(data, &state->object_nl, string_config(val)); } + else if (key == sym_array_nl) { state_write_value(data, &state->array_nl, string_config(val)); } + else if (key == sym_max_nesting) { state->max_nesting = long_config(val); } + else if (key == sym_allow_nan) { state->allow_nan = RTEST(val); } + else if (key == sym_ascii_only) { state->ascii_only = RTEST(val); } + else if (key == sym_depth) { state->depth = long_config(val); } + else if (key == sym_buffer_initial_length) { buffer_initial_length_set(state, val); } + else if (key == sym_script_safe) { state->script_safe = RTEST(val); } + else if (key == sym_escape_slash) { state->script_safe = RTEST(val); } + else if (key == sym_strict) { state->strict = RTEST(val); } + else if (key == sym_as_json) { + VALUE proc = RTEST(val) ? rb_convert_type(val, T_DATA, "Proc", "to_proc") : Qfalse; + state_write_value(data, &state->as_json, proc); + } + return ST_CONTINUE; +} + +static void configure_state(JSON_Generator_State *state, VALUE vstate, VALUE config) +{ + if (!RTEST(config)) return; + + Check_Type(config, T_HASH); + + if (!RHASH_SIZE(config)) return; + + struct configure_state_data data = { + .state = state, + .vstate = vstate + }; + + // We assume in most cases few keys are set so it's faster to go over + // the provided keys than to check all possible keys. + rb_hash_foreach(config, configure_state_i, (VALUE)&data); +} + +static VALUE cState_configure(VALUE self, VALUE opts) +{ + GET_STATE(self); + configure_state(state, self, opts); + return self; +} + +static VALUE cState_m_generate(VALUE klass, VALUE obj, VALUE opts, VALUE io) +{ + JSON_Generator_State state = {0}; + state_init(&state); + configure_state(&state, Qfalse, opts); + + char stack_buffer[FBUFFER_STACK_SIZE]; + FBuffer buffer = { + .io = RTEST(io) ? 
io : Qfalse, + }; + fbuffer_stack_init(&buffer, state.buffer_initial_length, stack_buffer, FBUFFER_STACK_SIZE); + + struct generate_json_data data = { + .buffer = &buffer, + .vstate = Qfalse, + .state = &state, + .obj = obj, + .func = generate_json, + }; + rb_rescue(generate_json_try, (VALUE)&data, generate_json_rescue, (VALUE)&data); + + return fbuffer_finalize(&buffer); +} + +/* + * + */ +void Init_generator(void) +{ +#ifdef HAVE_RB_EXT_RACTOR_SAFE + rb_ext_ractor_safe(true); +#endif + +#undef rb_intern + rb_require("json/common"); + + mJSON = rb_define_module("JSON"); + + rb_global_variable(&cFragment); + cFragment = rb_const_get(mJSON, rb_intern("Fragment")); + + VALUE mExt = rb_define_module_under(mJSON, "Ext"); + VALUE mGenerator = rb_define_module_under(mExt, "Generator"); + + rb_global_variable(&eGeneratorError); + eGeneratorError = rb_path2class("JSON::GeneratorError"); + + rb_global_variable(&eNestingError); + eNestingError = rb_path2class("JSON::NestingError"); + + cState = rb_define_class_under(mGenerator, "State", rb_cObject); + rb_define_alloc_func(cState, cState_s_allocate); + rb_define_singleton_method(cState, "from_state", cState_from_state_s, 1); + rb_define_method(cState, "initialize", cState_initialize, -1); + rb_define_alias(cState, "initialize", "initialize"); // avoid method redefinition warnings + rb_define_private_method(cState, "_configure", cState_configure, 1); + + rb_define_method(cState, "initialize_copy", cState_init_copy, 1); + rb_define_method(cState, "indent", cState_indent, 0); + rb_define_method(cState, "indent=", cState_indent_set, 1); + rb_define_method(cState, "space", cState_space, 0); + rb_define_method(cState, "space=", cState_space_set, 1); + rb_define_method(cState, "space_before", cState_space_before, 0); + rb_define_method(cState, "space_before=", cState_space_before_set, 1); + rb_define_method(cState, "object_nl", cState_object_nl, 0); + rb_define_method(cState, "object_nl=", cState_object_nl_set, 1); + rb_define_method(cState, "array_nl", cState_array_nl, 0); + rb_define_method(cState, "array_nl=", cState_array_nl_set, 1); + rb_define_method(cState, "as_json", cState_as_json, 0); + rb_define_method(cState, "as_json=", cState_as_json_set, 1); + rb_define_method(cState, "max_nesting", cState_max_nesting, 0); + rb_define_method(cState, "max_nesting=", cState_max_nesting_set, 1); + rb_define_method(cState, "script_safe", cState_script_safe, 0); + rb_define_method(cState, "script_safe?", cState_script_safe, 0); + rb_define_method(cState, "script_safe=", cState_script_safe_set, 1); + rb_define_alias(cState, "escape_slash", "script_safe"); + rb_define_alias(cState, "escape_slash?", "script_safe?"); + rb_define_alias(cState, "escape_slash=", "script_safe="); + rb_define_method(cState, "strict", cState_strict, 0); + rb_define_method(cState, "strict?", cState_strict, 0); + rb_define_method(cState, "strict=", cState_strict_set, 1); + rb_define_method(cState, "check_circular?", cState_check_circular_p, 0); + rb_define_method(cState, "allow_nan?", cState_allow_nan_p, 0); + rb_define_method(cState, "allow_nan=", cState_allow_nan_set, 1); + rb_define_method(cState, "ascii_only?", cState_ascii_only_p, 0); + rb_define_method(cState, "ascii_only=", cState_ascii_only_set, 1); + rb_define_method(cState, "depth", cState_depth, 0); + rb_define_method(cState, "depth=", cState_depth_set, 1); + rb_define_method(cState, "buffer_initial_length", cState_buffer_initial_length, 0); + rb_define_method(cState, "buffer_initial_length=", cState_buffer_initial_length_set, 1); 
+ rb_define_method(cState, "generate", cState_generate, -1); + rb_define_alias(cState, "generate_new", "generate"); // :nodoc: + + rb_define_singleton_method(cState, "generate", cState_m_generate, 3); + + VALUE mGeneratorMethods = rb_define_module_under(mGenerator, "GeneratorMethods"); + + VALUE mObject = rb_define_module_under(mGeneratorMethods, "Object"); + rb_define_method(mObject, "to_json", mObject_to_json, -1); + + VALUE mHash = rb_define_module_under(mGeneratorMethods, "Hash"); + rb_define_method(mHash, "to_json", mHash_to_json, -1); + + VALUE mArray = rb_define_module_under(mGeneratorMethods, "Array"); + rb_define_method(mArray, "to_json", mArray_to_json, -1); + +#ifdef RUBY_INTEGER_UNIFICATION + VALUE mInteger = rb_define_module_under(mGeneratorMethods, "Integer"); + rb_define_method(mInteger, "to_json", mInteger_to_json, -1); +#else + VALUE mFixnum = rb_define_module_under(mGeneratorMethods, "Fixnum"); + rb_define_method(mFixnum, "to_json", mFixnum_to_json, -1); + + VALUE mBignum = rb_define_module_under(mGeneratorMethods, "Bignum"); + rb_define_method(mBignum, "to_json", mBignum_to_json, -1); +#endif + VALUE mFloat = rb_define_module_under(mGeneratorMethods, "Float"); + rb_define_method(mFloat, "to_json", mFloat_to_json, -1); + + VALUE mString = rb_define_module_under(mGeneratorMethods, "String"); + rb_define_singleton_method(mString, "included", mString_included_s, 1); + rb_define_method(mString, "to_json", mString_to_json, -1); + rb_define_method(mString, "to_json_raw", mString_to_json_raw, -1); + rb_define_method(mString, "to_json_raw_object", mString_to_json_raw_object, 0); + + mString_Extend = rb_define_module_under(mString, "Extend"); + rb_define_method(mString_Extend, "json_create", mString_Extend_json_create, 1); + + VALUE mTrueClass = rb_define_module_under(mGeneratorMethods, "TrueClass"); + rb_define_method(mTrueClass, "to_json", mTrueClass_to_json, -1); + + VALUE mFalseClass = rb_define_module_under(mGeneratorMethods, "FalseClass"); + rb_define_method(mFalseClass, "to_json", mFalseClass_to_json, -1); + + VALUE mNilClass = rb_define_module_under(mGeneratorMethods, "NilClass"); + rb_define_method(mNilClass, "to_json", mNilClass_to_json, -1); + + rb_global_variable(&Encoding_UTF_8); + Encoding_UTF_8 = rb_const_get(rb_path2class("Encoding"), rb_intern("UTF_8")); + + i_to_s = rb_intern("to_s"); + i_to_json = rb_intern("to_json"); + i_new = rb_intern("new"); + i_pack = rb_intern("pack"); + i_unpack = rb_intern("unpack"); + i_create_id = rb_intern("create_id"); + i_extend = rb_intern("extend"); + i_encode = rb_intern("encode"); + + sym_indent = ID2SYM(rb_intern("indent")); + sym_space = ID2SYM(rb_intern("space")); + sym_space_before = ID2SYM(rb_intern("space_before")); + sym_object_nl = ID2SYM(rb_intern("object_nl")); + sym_array_nl = ID2SYM(rb_intern("array_nl")); + sym_max_nesting = ID2SYM(rb_intern("max_nesting")); + sym_allow_nan = ID2SYM(rb_intern("allow_nan")); + sym_ascii_only = ID2SYM(rb_intern("ascii_only")); + sym_depth = ID2SYM(rb_intern("depth")); + sym_buffer_initial_length = ID2SYM(rb_intern("buffer_initial_length")); + sym_script_safe = ID2SYM(rb_intern("script_safe")); + sym_escape_slash = ID2SYM(rb_intern("escape_slash")); + sym_strict = ID2SYM(rb_intern("strict")); + sym_as_json = ID2SYM(rb_intern("as_json")); + + usascii_encindex = rb_usascii_encindex(); + utf8_encindex = rb_utf8_encindex(); + binary_encindex = rb_ascii8bit_encindex(); + + rb_require("json/ext/generator/state"); + + + switch (find_simd_implementation()) { +#ifdef HAVE_SIMD +#ifdef 
HAVE_SIMD_NEON + case SIMD_NEON: + search_escape_basic_impl = search_escape_basic_neon; + break; +#endif /* HAVE_SIMD_NEON */ +#ifdef HAVE_SIMD_SSE2 + case SIMD_SSE2: + search_escape_basic_impl = search_escape_basic_sse2; + break; +#endif /* HAVE_SIMD_SSE2 */ +#endif /* HAVE_SIMD */ + default: + search_escape_basic_impl = search_escape_basic; + break; + } +} diff --git a/vendor/bundle/ruby/3.2.0/gems/json-2.13.2/ext/json/ext/parser/Makefile b/vendor/bundle/ruby/3.2.0/gems/json-2.13.2/ext/json/ext/parser/Makefile new file mode 100644 index 000000000..09e329bf8 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/json-2.13.2/ext/json/ext/parser/Makefile @@ -0,0 +1,269 @@ + +SHELL = /bin/sh + +# V=0 quiet, V=1 verbose. other values don't work. +V = 0 +V0 = $(V:0=) +Q1 = $(V:1=) +Q = $(Q1:0=@) +ECHO1 = $(V:1=@ :) +ECHO = $(ECHO1:0=@ echo) +NULLCMD = : + +#### Start of system configuration section. #### + +srcdir = . +topdir = /usr/include/ruby-3.2.0 +hdrdir = $(topdir) +arch_hdrdir = /usr/include/x86_64-linux-gnu/ruby-3.2.0 +PATH_SEPARATOR = : +VPATH = $(srcdir):$(arch_hdrdir)/ruby:$(hdrdir)/ruby +prefix = $(DESTDIR)/usr +rubysitearchprefix = $(sitearchlibdir)/$(RUBY_BASE_NAME) +rubyarchprefix = $(archlibdir)/$(RUBY_BASE_NAME) +rubylibprefix = $(libdir)/$(RUBY_BASE_NAME) +exec_prefix = $(prefix) +vendorarchhdrdir = $(sitearchincludedir)/$(RUBY_VERSION_NAME)/vendor_ruby +sitearchhdrdir = $(sitearchincludedir)/$(RUBY_VERSION_NAME)/site_ruby +rubyarchhdrdir = $(archincludedir)/$(RUBY_VERSION_NAME) +vendorhdrdir = $(rubyhdrdir)/vendor_ruby +sitehdrdir = $(rubyhdrdir)/site_ruby +rubyhdrdir = $(includedir)/$(RUBY_VERSION_NAME) +vendorarchdir = $(rubysitearchprefix)/vendor_ruby/$(ruby_version) +vendorlibdir = $(vendordir)/$(ruby_version) +vendordir = $(rubylibprefix)/vendor_ruby +sitearchdir = $(DESTDIR)/usr/local/lib/x86_64-linux-gnu/site_ruby +sitelibdir = $(sitedir)/$(ruby_version) +sitedir = $(DESTDIR)/usr/local/lib/site_ruby +rubyarchdir = $(rubyarchprefix)/$(ruby_version) +rubylibdir = $(rubylibprefix)/$(ruby_version) +sitearchincludedir = $(includedir)/$(sitearch) +archincludedir = $(includedir)/$(arch) +sitearchlibdir = $(libdir)/$(sitearch) +archlibdir = $(libdir)/$(arch) +ridir = $(datarootdir)/$(RI_BASE_NAME) +mandir = $(datarootdir)/man +localedir = $(datarootdir)/locale +libdir = $(exec_prefix)/lib +psdir = $(docdir) +pdfdir = $(docdir) +dvidir = $(docdir) +htmldir = $(docdir) +infodir = $(datarootdir)/info +docdir = $(datarootdir)/doc/$(PACKAGE) +oldincludedir = $(DESTDIR)/usr/include +includedir = $(prefix)/include +runstatedir = $(DESTDIR)/var/run +localstatedir = $(DESTDIR)/var +sharedstatedir = $(prefix)/com +sysconfdir = $(DESTDIR)/etc +datadir = $(datarootdir) +datarootdir = $(prefix)/share +libexecdir = $(exec_prefix)/libexec +sbindir = $(exec_prefix)/sbin +bindir = $(exec_prefix)/bin +archdir = $(rubyarchdir) + + +CC_WRAPPER = +CC = x86_64-linux-gnu-gcc +CXX = x86_64-linux-gnu-g++ +LIBRUBY = $(LIBRUBY_SO) +LIBRUBY_A = lib$(RUBY_SO_NAME)-static.a +LIBRUBYARG_SHARED = -l$(RUBY_SO_NAME) +LIBRUBYARG_STATIC = -l$(RUBY_SO_NAME)-static $(MAINLIBS) +empty = +OUTFLAG = -o $(empty) +COUTFLAG = -o $(empty) +CSRCFLAG = $(empty) + +RUBY_EXTCONF_H = +cflags = $(optflags) $(debugflags) $(warnflags) +cxxflags = +optflags = -O3 -fno-fast-math +debugflags = -ggdb3 +warnflags = -Wall -Wextra -Wdeprecated-declarations -Wdiv-by-zero -Wduplicated-cond -Wimplicit-function-declaration -Wimplicit-int -Wmisleading-indentation -Wpointer-arith -Wwrite-strings -Wold-style-definition -Wimplicit-fallthrough=0 
-Wmissing-noreturn -Wno-cast-function-type -Wno-constant-logical-operand -Wno-long-long -Wno-missing-field-initializers -Wno-overlength-strings -Wno-packed-bitfield-compat -Wno-parentheses-equality -Wno-self-assign -Wno-tautological-compare -Wno-unused-parameter -Wno-unused-value -Wsuggest-attribute=format -Wsuggest-attribute=noreturn -Wunused-variable -Wundef +cppflags = +CCDLFLAGS = -fPIC +CFLAGS = $(CCDLFLAGS) -g -O2 -fno-omit-frame-pointer -mno-omit-leaf-frame-pointer -ffile-prefix-map=BUILDDIR=. -fstack-protector-strong -fstack-clash-protection -Wformat -Werror=format-security -fcf-protection -fdebug-prefix-map=BUILDDIR=/usr/src/ruby3.2-3.2.3-1ubuntu0.24.04.6 -fPIC -std=c99 $(ARCH_FLAG) +INCFLAGS = -I. -I$(arch_hdrdir) -I$(hdrdir)/ruby/backward -I$(hdrdir) -I$(srcdir) +DEFS = +CPPFLAGS = -DHAVE_RB_ENC_INTERNED_STR -DHAVE_RB_HASH_NEW_CAPA -DHAVE_RB_HASH_BULK_INSERT -DHAVE_STRNLEN -DHAVE_X86INTRIN_H -DJSON_ENABLE_SIMD -DHAVE_CPUID_H -Wdate-time -D_FORTIFY_SOURCE=3 $(DEFS) $(cppflags) +CXXFLAGS = $(CCDLFLAGS) -g -O2 -fno-omit-frame-pointer -mno-omit-leaf-frame-pointer -ffile-prefix-map=BUILDDIR=. -fstack-protector-strong -fstack-clash-protection -Wformat -Werror=format-security -fcf-protection -fdebug-prefix-map=BUILDDIR=/usr/src/ruby3.2-3.2.3-1ubuntu0.24.04.6 $(ARCH_FLAG) +ldflags = -L. -Wl,-Bsymbolic-functions -Wl,-z,relro -Wl,-z,now -fstack-protector-strong -rdynamic -Wl,-export-dynamic -Wl,--no-as-needed +dldflags = -Wl,-Bsymbolic-functions -Wl,-z,relro -Wl,-z,now +ARCH_FLAG = +DLDFLAGS = $(ldflags) $(dldflags) $(ARCH_FLAG) +LDSHARED = $(CC) -shared +LDSHAREDXX = $(CXX) -shared +AR = x86_64-linux-gnu-gcc-ar +EXEEXT = + +RUBY_INSTALL_NAME = $(RUBY_BASE_NAME)3.2 +RUBY_SO_NAME = ruby-3.2 +RUBYW_INSTALL_NAME = +RUBY_VERSION_NAME = $(RUBY_BASE_NAME)-$(ruby_version) +RUBYW_BASE_NAME = rubyw +RUBY_BASE_NAME = ruby + +arch = x86_64-linux-gnu +sitearch = $(arch) +ruby_version = 3.2.0 +ruby = $(bindir)/$(RUBY_BASE_NAME)3.2 +RUBY = $(ruby) +BUILTRUBY = $(bindir)/$(RUBY_BASE_NAME)3.2 +ruby_headers = $(hdrdir)/ruby.h $(hdrdir)/ruby/backward.h $(hdrdir)/ruby/ruby.h $(hdrdir)/ruby/defines.h $(hdrdir)/ruby/missing.h $(hdrdir)/ruby/intern.h $(hdrdir)/ruby/st.h $(hdrdir)/ruby/subst.h $(arch_hdrdir)/ruby/config.h + +RM = rm -f +RM_RF = rm -fr +RMDIRS = rmdir --ignore-fail-on-non-empty -p +MAKEDIRS = /bin/mkdir -p +INSTALL = /usr/bin/install -c +INSTALL_PROG = $(INSTALL) -m 0755 +INSTALL_DATA = $(INSTALL) -m 644 +COPY = cp +TOUCH = exit > + +#### End of system configuration section. #### + +preload = +libpath = . $(archlibdir) +LIBPATH = -L. -L$(archlibdir) +DEFFILE = + +CLEANFILES = mkmf.log +DISTCLEANFILES = +DISTCLEANDIRS = + +extout = +extout_prefix = +target_prefix = /json/ext +LOCAL_LIBS = +LIBS = $(LIBRUBYARG_SHARED) -lm -lpthread -lc +ORIG_SRCS = parser.c +SRCS = $(ORIG_SRCS) +OBJS = parser.o +HDRS = +LOCAL_HDRS = +TARGET = parser +TARGET_NAME = parser +TARGET_ENTRY = Init_$(TARGET_NAME) +DLLIB = $(TARGET).so +EXTSTATIC = +STATIC_LIB = + +TIMESTAMP_DIR = . 
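[Editorial note, not part of the patch] Everything in this file is the boilerplate half of a Makefile that mkmf generates from the parser extension's extconf.rb (the extconf.rb itself appears later in this patch); it is only checked in because the PR vendors the pre-built gem. A rough sketch of the flow that produces and drives such a Makefile, using an illustrative file name:

    # extconf_sketch.rb -- illustrative only; the gem's real extconf.rb is included below.
    require "mkmf"

    have_func("rb_enc_interned_str", "ruby/encoding.h")  # successful checks become -DHAVE_... entries in CPPFLAGS
    append_cflags("-std=c99")
    create_makefile("json/ext/parser")                   # writes a Makefile much like this one

    # Typical driver, run from the extension directory:
    #   ruby extconf_sketch.rb && make && make install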
+BINDIR = $(bindir) +RUBYCOMMONDIR = $(sitedir)$(target_prefix) +RUBYLIBDIR = $(sitelibdir)$(target_prefix) +RUBYARCHDIR = $(sitearchdir)$(target_prefix) +HDRDIR = $(sitehdrdir)$(target_prefix) +ARCHHDRDIR = $(sitearchhdrdir)$(target_prefix) +TARGET_SO_DIR = +TARGET_SO = $(TARGET_SO_DIR)$(DLLIB) +CLEANLIBS = $(TARGET_SO) false +CLEANOBJS = $(OBJS) *.bak +TARGET_SO_DIR_TIMESTAMP = $(TIMESTAMP_DIR)/.sitearchdir.-.json.-.ext.time + +all: $(DLLIB) +static: $(STATIC_LIB) +.PHONY: all install static install-so install-rb +.PHONY: clean clean-so clean-static clean-rb + +clean-static:: +clean-rb-default:: +clean-rb:: +clean-so:: +clean: clean-so clean-static clean-rb-default clean-rb + -$(Q)$(RM_RF) $(CLEANLIBS) $(CLEANOBJS) $(CLEANFILES) .*.time + +distclean-rb-default:: +distclean-rb:: +distclean-so:: +distclean-static:: +distclean: clean distclean-so distclean-static distclean-rb-default distclean-rb + -$(Q)$(RM) Makefile $(RUBY_EXTCONF_H) conftest.* mkmf.log + -$(Q)$(RM) core ruby$(EXEEXT) *~ $(DISTCLEANFILES) + -$(Q)$(RMDIRS) $(DISTCLEANDIRS) 2> /dev/null || true + +realclean: distclean +install: install-so install-rb + +install-so: $(DLLIB) $(TARGET_SO_DIR_TIMESTAMP) + $(INSTALL_PROG) $(DLLIB) $(RUBYARCHDIR) +clean-static:: + -$(Q)$(RM) $(STATIC_LIB) +install-rb: pre-install-rb do-install-rb install-rb-default +install-rb-default: pre-install-rb-default do-install-rb-default +pre-install-rb: Makefile +pre-install-rb-default: Makefile +do-install-rb: +do-install-rb-default: +pre-install-rb-default: + @$(NULLCMD) +$(TARGET_SO_DIR_TIMESTAMP): + $(Q) $(MAKEDIRS) $(@D) $(RUBYARCHDIR) + $(Q) $(TOUCH) $@ + +site-install: site-install-so site-install-rb +site-install-so: install-so +site-install-rb: install-rb + +.SUFFIXES: .c .m .cc .mm .cxx .cpp .o .S + +.cc.o: + $(ECHO) compiling $(<) + $(Q) $(CXX) $(INCFLAGS) $(CPPFLAGS) $(CXXFLAGS) $(COUTFLAG)$@ -c $(CSRCFLAG)$< + +.cc.S: + $(ECHO) translating $(<) + $(Q) $(CXX) $(INCFLAGS) $(CPPFLAGS) $(CXXFLAGS) $(COUTFLAG)$@ -S $(CSRCFLAG)$< + +.mm.o: + $(ECHO) compiling $(<) + $(Q) $(CXX) $(INCFLAGS) $(CPPFLAGS) $(CXXFLAGS) $(COUTFLAG)$@ -c $(CSRCFLAG)$< + +.mm.S: + $(ECHO) translating $(<) + $(Q) $(CXX) $(INCFLAGS) $(CPPFLAGS) $(CXXFLAGS) $(COUTFLAG)$@ -S $(CSRCFLAG)$< + +.cxx.o: + $(ECHO) compiling $(<) + $(Q) $(CXX) $(INCFLAGS) $(CPPFLAGS) $(CXXFLAGS) $(COUTFLAG)$@ -c $(CSRCFLAG)$< + +.cxx.S: + $(ECHO) translating $(<) + $(Q) $(CXX) $(INCFLAGS) $(CPPFLAGS) $(CXXFLAGS) $(COUTFLAG)$@ -S $(CSRCFLAG)$< + +.cpp.o: + $(ECHO) compiling $(<) + $(Q) $(CXX) $(INCFLAGS) $(CPPFLAGS) $(CXXFLAGS) $(COUTFLAG)$@ -c $(CSRCFLAG)$< + +.cpp.S: + $(ECHO) translating $(<) + $(Q) $(CXX) $(INCFLAGS) $(CPPFLAGS) $(CXXFLAGS) $(COUTFLAG)$@ -S $(CSRCFLAG)$< + +.c.o: + $(ECHO) compiling $(<) + $(Q) $(CC) $(INCFLAGS) $(CPPFLAGS) $(CFLAGS) $(COUTFLAG)$@ -c $(CSRCFLAG)$< + +.c.S: + $(ECHO) translating $(<) + $(Q) $(CC) $(INCFLAGS) $(CPPFLAGS) $(CFLAGS) $(COUTFLAG)$@ -S $(CSRCFLAG)$< + +.m.o: + $(ECHO) compiling $(<) + $(Q) $(CC) $(INCFLAGS) $(CPPFLAGS) $(CFLAGS) $(COUTFLAG)$@ -c $(CSRCFLAG)$< + +.m.S: + $(ECHO) translating $(<) + $(Q) $(CC) $(INCFLAGS) $(CPPFLAGS) $(CFLAGS) $(COUTFLAG)$@ -S $(CSRCFLAG)$< + +$(TARGET_SO): $(OBJS) Makefile + $(ECHO) linking shared-object json/ext/$(DLLIB) + -$(Q)$(RM) $(@) + $(Q) $(LDSHARED) -o $@ $(OBJS) $(LIBPATH) $(DLDFLAGS) $(LOCAL_LIBS) $(LIBS) + + + +$(OBJS): $(HDRS) $(ruby_headers) diff --git a/vendor/bundle/ruby/3.2.0/gems/json-2.13.2/ext/json/ext/parser/extconf.rb b/vendor/bundle/ruby/3.2.0/gems/json-2.13.2/ext/json/ext/parser/extconf.rb new 
file mode 100644 index 000000000..de5d5758b --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/json-2.13.2/ext/json/ext/parser/extconf.rb @@ -0,0 +1,15 @@ +# frozen_string_literal: true +require 'mkmf' + +have_func("rb_enc_interned_str", "ruby/encoding.h") # RUBY_VERSION >= 3.0 +have_func("rb_hash_new_capa", "ruby.h") # RUBY_VERSION >= 3.2 +have_func("rb_hash_bulk_insert", "ruby.h") # Missing on TruffleRuby +have_func("strnlen", "string.h") # Missing on Solaris 10 + +append_cflags("-std=c99") + +if enable_config('parser-use-simd', default=!ENV["JSON_DISABLE_SIMD"]) + load __dir__ + "/../simd/conf.rb" +end + +create_makefile 'json/ext/parser' diff --git a/vendor/bundle/ruby/3.2.0/gems/json-2.13.2/ext/json/ext/parser/parser.c b/vendor/bundle/ruby/3.2.0/gems/json-2.13.2/ext/json/ext/parser/parser.c new file mode 100644 index 000000000..ab9d6c205 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/json-2.13.2/ext/json/ext/parser/parser.c @@ -0,0 +1,1552 @@ +#include "ruby.h" +#include "ruby/encoding.h" + +/* shims */ +/* This is the fallback definition from Ruby 3.4 */ + +#ifndef RBIMPL_STDBOOL_H +#if defined(__cplusplus) +# if defined(HAVE_STDBOOL_H) && (__cplusplus >= 201103L) +# include +# endif +#elif defined(HAVE_STDBOOL_H) +# include +#elif !defined(HAVE__BOOL) +typedef unsigned char _Bool; +# define bool _Bool +# define true ((_Bool)+1) +# define false ((_Bool)+0) +# define __bool_true_false_are_defined +#endif +#endif + +#include "../simd/simd.h" + +#ifndef RB_UNLIKELY +#define RB_UNLIKELY(expr) expr +#endif + +#ifndef RB_LIKELY +#define RB_LIKELY(expr) expr +#endif + +static VALUE mJSON, eNestingError, Encoding_UTF_8; +static VALUE CNaN, CInfinity, CMinusInfinity; + +static ID i_chr, i_aset, i_aref, + i_leftshift, i_new, i_try_convert, i_uminus, i_encode; + +static VALUE sym_max_nesting, sym_allow_nan, sym_allow_trailing_comma, sym_symbolize_names, sym_freeze, + sym_decimal_class, sym_on_load, sym_allow_duplicate_key; + +static int binary_encindex; +static int utf8_encindex; + +#ifndef HAVE_RB_HASH_BULK_INSERT +// For TruffleRuby +void +rb_hash_bulk_insert(long count, const VALUE *pairs, VALUE hash) +{ + long index = 0; + while (index < count) { + VALUE name = pairs[index++]; + VALUE value = pairs[index++]; + rb_hash_aset(hash, name, value); + } + RB_GC_GUARD(hash); +} +#endif + +#ifndef HAVE_RB_HASH_NEW_CAPA +#define rb_hash_new_capa(n) rb_hash_new() +#endif + + +/* name cache */ + +#include +#include + +// Object names are likely to be repeated, and are frozen. +// As such we can re-use them if we keep a cache of the ones we've seen so far, +// and save much more expensive lookups into the global fstring table. +// This cache implementation is deliberately simple, as we're optimizing for compactness, +// to be able to fit safely on the stack. +// As such, binary search into a sorted array gives a good tradeoff between compactness and +// performance. 
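[Editorial note, not part of the patch] The comment above summarizes the parser's per-document name cache: a small, stack-friendly sorted array of already-interned keys, probed with binary search. A rough Ruby rendering of the same idea (illustrative only; the real C code below additionally skips very long keys, keys whose first byte is not a letter, and keys containing escapes):

    class NameCacheSketch
      CAPA = 63  # mirrors JSON_RVALUE_CACHE_CAPA below

      def initialize
        @entries = []  # kept sorted so lookups can use binary search
      end

      def fetch(name)
        index = @entries.bsearch_index { |entry| entry >= name }
        return @entries[index] if index && @entries[index] == name

        interned = -name.dup  # String#-@ returns a frozen, deduplicated string
        @entries.insert(index || @entries.size, interned) if @entries.size < CAPA
        interned
      end
    end

    cache = NameCacheSketch.new
    cache.fetch("id").equal?(cache.fetch("id"))  # => true, repeated keys share one frozen string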
+#define JSON_RVALUE_CACHE_CAPA 63 +typedef struct rvalue_cache_struct { + int length; + VALUE entries[JSON_RVALUE_CACHE_CAPA]; +} rvalue_cache; + +static rb_encoding *enc_utf8; + +#define JSON_RVALUE_CACHE_MAX_ENTRY_LENGTH 55 + +static inline VALUE build_interned_string(const char *str, const long length) +{ +# ifdef HAVE_RB_ENC_INTERNED_STR + return rb_enc_interned_str(str, length, enc_utf8); +# else + VALUE rstring = rb_utf8_str_new(str, length); + return rb_funcall(rb_str_freeze(rstring), i_uminus, 0); +# endif +} + +static inline VALUE build_symbol(const char *str, const long length) +{ + return rb_str_intern(build_interned_string(str, length)); +} + +static void rvalue_cache_insert_at(rvalue_cache *cache, int index, VALUE rstring) +{ + MEMMOVE(&cache->entries[index + 1], &cache->entries[index], VALUE, cache->length - index); + cache->length++; + cache->entries[index] = rstring; +} + +static inline int rstring_cache_cmp(const char *str, const long length, VALUE rstring) +{ + long rstring_length = RSTRING_LEN(rstring); + if (length == rstring_length) { + return memcmp(str, RSTRING_PTR(rstring), length); + } else { + return (int)(length - rstring_length); + } +} + +static VALUE rstring_cache_fetch(rvalue_cache *cache, const char *str, const long length) +{ + if (RB_UNLIKELY(length > JSON_RVALUE_CACHE_MAX_ENTRY_LENGTH)) { + // Common names aren't likely to be very long. So we just don't + // cache names above an arbitrary threshold. + return Qfalse; + } + + if (RB_UNLIKELY(!isalpha((unsigned char)str[0]))) { + // Simple heuristic, if the first character isn't a letter, + // we're much less likely to see this string again. + // We mostly want to cache strings that are likely to be repeated. + return Qfalse; + } + + int low = 0; + int high = cache->length - 1; + int mid = 0; + int last_cmp = 0; + + while (low <= high) { + mid = (high + low) >> 1; + VALUE entry = cache->entries[mid]; + last_cmp = rstring_cache_cmp(str, length, entry); + + if (last_cmp == 0) { + return entry; + } else if (last_cmp > 0) { + low = mid + 1; + } else { + high = mid - 1; + } + } + + if (RB_UNLIKELY(memchr(str, '\\', length))) { + // We assume the overwhelming majority of names don't need to be escaped. + // But if they do, we have to fallback to the slow path. + return Qfalse; + } + + VALUE rstring = build_interned_string(str, length); + + if (cache->length < JSON_RVALUE_CACHE_CAPA) { + if (last_cmp > 0) { + mid += 1; + } + + rvalue_cache_insert_at(cache, mid, rstring); + } + return rstring; +} + +static VALUE rsymbol_cache_fetch(rvalue_cache *cache, const char *str, const long length) +{ + if (RB_UNLIKELY(length > JSON_RVALUE_CACHE_MAX_ENTRY_LENGTH)) { + // Common names aren't likely to be very long. So we just don't + // cache names above an arbitrary threshold. + return Qfalse; + } + + if (RB_UNLIKELY(!isalpha((unsigned char)str[0]))) { + // Simple heuristic, if the first character isn't a letter, + // we're much less likely to see this string again. + // We mostly want to cache strings that are likely to be repeated. 
+ return Qfalse; + } + + int low = 0; + int high = cache->length - 1; + int mid = 0; + int last_cmp = 0; + + while (low <= high) { + mid = (high + low) >> 1; + VALUE entry = cache->entries[mid]; + last_cmp = rstring_cache_cmp(str, length, rb_sym2str(entry)); + + if (last_cmp == 0) { + return entry; + } else if (last_cmp > 0) { + low = mid + 1; + } else { + high = mid - 1; + } + } + + if (RB_UNLIKELY(memchr(str, '\\', length))) { + // We assume the overwhelming majority of names don't need to be escaped. + // But if they do, we have to fallback to the slow path. + return Qfalse; + } + + VALUE rsymbol = build_symbol(str, length); + + if (cache->length < JSON_RVALUE_CACHE_CAPA) { + if (last_cmp > 0) { + mid += 1; + } + + rvalue_cache_insert_at(cache, mid, rsymbol); + } + return rsymbol; +} + +/* rvalue stack */ + +#define RVALUE_STACK_INITIAL_CAPA 128 + +enum rvalue_stack_type { + RVALUE_STACK_HEAP_ALLOCATED = 0, + RVALUE_STACK_STACK_ALLOCATED = 1, +}; + +typedef struct rvalue_stack_struct { + enum rvalue_stack_type type; + long capa; + long head; + VALUE *ptr; +} rvalue_stack; + +static rvalue_stack *rvalue_stack_spill(rvalue_stack *old_stack, VALUE *handle, rvalue_stack **stack_ref); + +static rvalue_stack *rvalue_stack_grow(rvalue_stack *stack, VALUE *handle, rvalue_stack **stack_ref) +{ + long required = stack->capa * 2; + + if (stack->type == RVALUE_STACK_STACK_ALLOCATED) { + stack = rvalue_stack_spill(stack, handle, stack_ref); + } else { + REALLOC_N(stack->ptr, VALUE, required); + stack->capa = required; + } + return stack; +} + +static VALUE rvalue_stack_push(rvalue_stack *stack, VALUE value, VALUE *handle, rvalue_stack **stack_ref) +{ + if (RB_UNLIKELY(stack->head >= stack->capa)) { + stack = rvalue_stack_grow(stack, handle, stack_ref); + } + stack->ptr[stack->head] = value; + stack->head++; + return value; +} + +static inline VALUE *rvalue_stack_peek(rvalue_stack *stack, long count) +{ + return stack->ptr + (stack->head - count); +} + +static inline void rvalue_stack_pop(rvalue_stack *stack, long count) +{ + stack->head -= count; +} + +static void rvalue_stack_mark(void *ptr) +{ + rvalue_stack *stack = (rvalue_stack *)ptr; + long index; + for (index = 0; index < stack->head; index++) { + rb_gc_mark(stack->ptr[index]); + } +} + +static void rvalue_stack_free(void *ptr) +{ + rvalue_stack *stack = (rvalue_stack *)ptr; + if (stack) { + ruby_xfree(stack->ptr); + ruby_xfree(stack); + } +} + +static size_t rvalue_stack_memsize(const void *ptr) +{ + const rvalue_stack *stack = (const rvalue_stack *)ptr; + return sizeof(rvalue_stack) + sizeof(VALUE) * stack->capa; +} + +static const rb_data_type_t JSON_Parser_rvalue_stack_type = { + "JSON::Ext::Parser/rvalue_stack", + { + .dmark = rvalue_stack_mark, + .dfree = rvalue_stack_free, + .dsize = rvalue_stack_memsize, + }, + 0, 0, + RUBY_TYPED_FREE_IMMEDIATELY, +}; + +static rvalue_stack *rvalue_stack_spill(rvalue_stack *old_stack, VALUE *handle, rvalue_stack **stack_ref) +{ + rvalue_stack *stack; + *handle = TypedData_Make_Struct(0, rvalue_stack, &JSON_Parser_rvalue_stack_type, stack); + *stack_ref = stack; + MEMCPY(stack, old_stack, rvalue_stack, 1); + + stack->capa = old_stack->capa << 1; + stack->ptr = ALLOC_N(VALUE, stack->capa); + stack->type = RVALUE_STACK_HEAP_ALLOCATED; + MEMCPY(stack->ptr, old_stack->ptr, VALUE, old_stack->head); + return stack; +} + +static void rvalue_stack_eagerly_release(VALUE handle) +{ + if (handle) { + rvalue_stack *stack; + TypedData_Get_Struct(handle, rvalue_stack, &JSON_Parser_rvalue_stack_type, stack); + 
RTYPEDDATA_DATA(handle) = NULL; + rvalue_stack_free(stack); + } +} + + +#ifndef HAVE_STRNLEN +static size_t strnlen(const char *s, size_t maxlen) +{ + char *p; + return ((p = memchr(s, '\0', maxlen)) ? p - s : maxlen); +} +#endif + +static int convert_UTF32_to_UTF8(char *buf, uint32_t ch) +{ + int len = 1; + if (ch <= 0x7F) { + buf[0] = (char) ch; + } else if (ch <= 0x07FF) { + buf[0] = (char) ((ch >> 6) | 0xC0); + buf[1] = (char) ((ch & 0x3F) | 0x80); + len++; + } else if (ch <= 0xFFFF) { + buf[0] = (char) ((ch >> 12) | 0xE0); + buf[1] = (char) (((ch >> 6) & 0x3F) | 0x80); + buf[2] = (char) ((ch & 0x3F) | 0x80); + len += 2; + } else if (ch <= 0x1fffff) { + buf[0] =(char) ((ch >> 18) | 0xF0); + buf[1] =(char) (((ch >> 12) & 0x3F) | 0x80); + buf[2] =(char) (((ch >> 6) & 0x3F) | 0x80); + buf[3] =(char) ((ch & 0x3F) | 0x80); + len += 3; + } else { + buf[0] = '?'; + } + return len; +} + +enum duplicate_key_action { + JSON_DEPRECATED = 0, + JSON_IGNORE, + JSON_RAISE, +}; + +typedef struct JSON_ParserStruct { + VALUE on_load_proc; + VALUE decimal_class; + ID decimal_method_id; + enum duplicate_key_action on_duplicate_key; + int max_nesting; + bool allow_nan; + bool allow_trailing_comma; + bool parsing_name; + bool symbolize_names; + bool freeze; +} JSON_ParserConfig; + +typedef struct JSON_ParserStateStruct { + VALUE stack_handle; + const char *start; + const char *cursor; + const char *end; + rvalue_stack *stack; + rvalue_cache name_cache; + int in_array; + int current_nesting; +} JSON_ParserState; + +static void cursor_position(JSON_ParserState *state, long *line_out, long *column_out) +{ + const char *cursor = state->cursor; + long column = 0; + long line = 1; + + while (cursor >= state->start) { + if (*cursor-- == '\n') { + break; + } + column++; + } + + while (cursor >= state->start) { + if (*cursor-- == '\n') { + line++; + } + } + *line_out = line; + *column_out = column; +} + +static void emit_parse_warning(const char *message, JSON_ParserState *state) +{ + long line, column; + cursor_position(state, &line, &column); + + VALUE warning = rb_sprintf("%s at line %ld column %ld", message, line, column); + rb_funcall(mJSON, rb_intern("deprecation_warning"), 1, warning); +} + +#define PARSE_ERROR_FRAGMENT_LEN 32 + +#ifdef RBIMPL_ATTR_NORETURN +RBIMPL_ATTR_NORETURN() +#endif +static void raise_parse_error(const char *format, JSON_ParserState *state) +{ + unsigned char buffer[PARSE_ERROR_FRAGMENT_LEN + 3]; + long line, column; + cursor_position(state, &line, &column); + + const char *ptr = "EOF"; + if (state->cursor && state->cursor < state->end) { + ptr = state->cursor; + size_t len = 0; + while (len < PARSE_ERROR_FRAGMENT_LEN) { + char ch = ptr[len]; + if (!ch || ch == '\n' || ch == ' ' || ch == '\t' || ch == '\r') { + break; + } + len++; + } + + if (len) { + buffer[0] = '\''; + MEMCPY(buffer + 1, ptr, char, len); + + while (buffer[len] >= 0x80 && buffer[len] < 0xC0) { // Is continuation byte + len--; + } + + if (buffer[len] >= 0xC0) { // multibyte character start + len--; + } + + buffer[len + 1] = '\''; + buffer[len + 2] = '\0'; + ptr = (const char *)buffer; + } + } + + VALUE msg = rb_sprintf(format, ptr); + VALUE message = rb_enc_sprintf(enc_utf8, "%s at line %ld column %ld", RSTRING_PTR(msg), line, column); + RB_GC_GUARD(msg); + + VALUE exc = rb_exc_new_str(rb_path2class("JSON::ParserError"), message); + rb_ivar_set(exc, rb_intern("@line"), LONG2NUM(line)); + rb_ivar_set(exc, rb_intern("@column"), LONG2NUM(column)); + rb_exc_raise(exc); +} + +#ifdef RBIMPL_ATTR_NORETURN 
+RBIMPL_ATTR_NORETURN() +#endif +static void raise_parse_error_at(const char *format, JSON_ParserState *state, const char *at) +{ + state->cursor = at; + raise_parse_error(format, state); +} + +/* unicode */ + +static const signed char digit_values[256] = { + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, -1, + -1, -1, -1, -1, -1, -1, 10, 11, 12, 13, 14, 15, -1, -1, -1, -1, -1, -1, -1, + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + 10, 11, 12, 13, 14, 15, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + -1, -1, -1, -1, -1, -1, -1 +}; + +static uint32_t unescape_unicode(JSON_ParserState *state, const unsigned char *p) +{ + signed char b; + uint32_t result = 0; + b = digit_values[p[0]]; + if (b < 0) raise_parse_error_at("incomplete unicode character escape sequence at %s", state, (char *)p - 2); + result = (result << 4) | (unsigned char)b; + b = digit_values[p[1]]; + if (b < 0) raise_parse_error_at("incomplete unicode character escape sequence at %s", state, (char *)p - 2); + result = (result << 4) | (unsigned char)b; + b = digit_values[p[2]]; + if (b < 0) raise_parse_error_at("incomplete unicode character escape sequence at %s", state, (char *)p - 2); + result = (result << 4) | (unsigned char)b; + b = digit_values[p[3]]; + if (b < 0) raise_parse_error_at("incomplete unicode character escape sequence at %s", state, (char *)p - 2); + result = (result << 4) | (unsigned char)b; + return result; +} + +#define GET_PARSER_CONFIG \ + JSON_ParserConfig *config; \ + TypedData_Get_Struct(self, JSON_ParserConfig, &JSON_ParserConfig_type, config) + +static const rb_data_type_t JSON_ParserConfig_type; + +static const bool whitespace[256] = { + [' '] = 1, + ['\t'] = 1, + ['\n'] = 1, + ['\r'] = 1, + ['/'] = 1, +}; + +static void +json_eat_comments(JSON_ParserState *state) +{ + if (state->cursor + 1 < state->end) { + switch (state->cursor[1]) { + case '/': { + state->cursor = memchr(state->cursor, '\n', state->end - state->cursor); + if (!state->cursor) { + state->cursor = state->end; + } else { + state->cursor++; + } + break; + } + case '*': { + state->cursor += 2; + while (true) { + state->cursor = memchr(state->cursor, '*', state->end - state->cursor); + if (!state->cursor) { + raise_parse_error_at("unexpected end of input, expected closing '*/'", state, state->end); + } else { + state->cursor++; + if (state->cursor < state->end && *state->cursor == '/') { + state->cursor++; + break; + } + } + } + break; + } + default: + raise_parse_error("unexpected token %s", state); + break; + } + } else { + raise_parse_error("unexpected token %s", state); + } +} + +static inline void +json_eat_whitespace(JSON_ParserState *state) +{ + while (state->cursor < state->end && RB_UNLIKELY(whitespace[(unsigned char)*state->cursor])) { + if (RB_LIKELY(*state->cursor != '/')) { 
+ state->cursor++; + } else { + json_eat_comments(state); + } + } +} + +static inline VALUE build_string(const char *start, const char *end, bool intern, bool symbolize) +{ + if (symbolize) { + intern = true; + } + VALUE result; +# ifdef HAVE_RB_ENC_INTERNED_STR + if (intern) { + result = rb_enc_interned_str(start, (long)(end - start), enc_utf8); + } else { + result = rb_utf8_str_new(start, (long)(end - start)); + } +# else + result = rb_utf8_str_new(start, (long)(end - start)); + if (intern) { + result = rb_funcall(rb_str_freeze(result), i_uminus, 0); + } +# endif + + if (symbolize) { + result = rb_str_intern(result); + } + + return result; +} + +static inline VALUE json_string_fastpath(JSON_ParserState *state, const char *string, const char *stringEnd, bool is_name, bool intern, bool symbolize) +{ + size_t bufferSize = stringEnd - string; + + if (is_name && state->in_array) { + VALUE cached_key; + if (RB_UNLIKELY(symbolize)) { + cached_key = rsymbol_cache_fetch(&state->name_cache, string, bufferSize); + } else { + cached_key = rstring_cache_fetch(&state->name_cache, string, bufferSize); + } + + if (RB_LIKELY(cached_key)) { + return cached_key; + } + } + + return build_string(string, stringEnd, intern, symbolize); +} + +static VALUE json_string_unescape(JSON_ParserState *state, const char *string, const char *stringEnd, bool is_name, bool intern, bool symbolize) +{ + size_t bufferSize = stringEnd - string; + const char *p = string, *pe = string, *unescape, *bufferStart; + char *buffer; + int unescape_len; + char buf[4]; + + if (is_name && state->in_array) { + VALUE cached_key; + if (RB_UNLIKELY(symbolize)) { + cached_key = rsymbol_cache_fetch(&state->name_cache, string, bufferSize); + } else { + cached_key = rstring_cache_fetch(&state->name_cache, string, bufferSize); + } + + if (RB_LIKELY(cached_key)) { + return cached_key; + } + } + + VALUE result = rb_str_buf_new(bufferSize); + rb_enc_associate_index(result, utf8_encindex); + buffer = RSTRING_PTR(result); + bufferStart = buffer; + + while (pe < stringEnd && (pe = memchr(pe, '\\', stringEnd - pe))) { + unescape = (char *) "?"; + unescape_len = 1; + if (pe > p) { + MEMCPY(buffer, p, char, pe - p); + buffer += pe - p; + } + switch (*++pe) { + case 'n': + unescape = (char *) "\n"; + break; + case 'r': + unescape = (char *) "\r"; + break; + case 't': + unescape = (char *) "\t"; + break; + case '"': + unescape = (char *) "\""; + break; + case '\\': + unescape = (char *) "\\"; + break; + case 'b': + unescape = (char *) "\b"; + break; + case 'f': + unescape = (char *) "\f"; + break; + case 'u': + if (pe > stringEnd - 5) { + raise_parse_error_at("incomplete unicode character escape sequence at %s", state, p); + } else { + uint32_t ch = unescape_unicode(state, (unsigned char *) ++pe); + pe += 3; + /* To handle values above U+FFFF, we take a sequence of + * \uXXXX escapes in the U+D800..U+DBFF then + * U+DC00..U+DFFF ranges, take the low 10 bits from each + * to make a 20-bit number, then add 0x10000 to get the + * final codepoint. + * + * See Unicode 15: 3.8 "Surrogates", 5.3 "Handling + * Surrogate Pairs in UTF-16", and 23.6 "Surrogates + * Area". 
+ */ + if ((ch & 0xFC00) == 0xD800) { + pe++; + if (pe > stringEnd - 6) { + raise_parse_error_at("incomplete surrogate pair at %s", state, p); + } + if (pe[0] == '\\' && pe[1] == 'u') { + uint32_t sur = unescape_unicode(state, (unsigned char *) pe + 2); + ch = (((ch & 0x3F) << 10) | ((((ch >> 6) & 0xF) + 1) << 16) + | (sur & 0x3FF)); + pe += 5; + } else { + unescape = (char *) "?"; + break; + } + } + unescape_len = convert_UTF32_to_UTF8(buf, ch); + unescape = buf; + } + break; + default: + p = pe; + continue; + } + MEMCPY(buffer, unescape, char, unescape_len); + buffer += unescape_len; + p = ++pe; + } + + if (stringEnd > p) { + MEMCPY(buffer, p, char, stringEnd - p); + buffer += stringEnd - p; + } + rb_str_set_len(result, buffer - bufferStart); + + if (symbolize) { + result = rb_str_intern(result); + } else if (intern) { + result = rb_funcall(rb_str_freeze(result), i_uminus, 0); + } + + return result; +} + +#define MAX_FAST_INTEGER_SIZE 18 +static inline VALUE fast_decode_integer(const char *p, const char *pe) +{ + bool negative = false; + if (*p == '-') { + negative = true; + p++; + } + + long long memo = 0; + while (p < pe) { + memo *= 10; + memo += *p - '0'; + p++; + } + + if (negative) { + memo = -memo; + } + return LL2NUM(memo); +} + +static VALUE json_decode_large_integer(const char *start, long len) +{ + VALUE buffer_v; + char *buffer = RB_ALLOCV_N(char, buffer_v, len + 1); + MEMCPY(buffer, start, char, len); + buffer[len] = '\0'; + VALUE number = rb_cstr2inum(buffer, 10); + RB_ALLOCV_END(buffer_v); + return number; +} + +static inline VALUE +json_decode_integer(const char *start, const char *end) +{ + long len = end - start; + if (RB_LIKELY(len < MAX_FAST_INTEGER_SIZE)) { + return fast_decode_integer(start, end); + } + return json_decode_large_integer(start, len); +} + +static VALUE json_decode_large_float(const char *start, long len) +{ + VALUE buffer_v; + char *buffer = RB_ALLOCV_N(char, buffer_v, len + 1); + MEMCPY(buffer, start, char, len); + buffer[len] = '\0'; + VALUE number = DBL2NUM(rb_cstr_to_dbl(buffer, 1)); + RB_ALLOCV_END(buffer_v); + return number; +} + +static VALUE json_decode_float(JSON_ParserConfig *config, const char *start, const char *end) +{ + long len = end - start; + + if (RB_UNLIKELY(config->decimal_class)) { + VALUE text = rb_str_new(start, len); + return rb_funcallv(config->decimal_class, config->decimal_method_id, 1, &text); + } else if (RB_LIKELY(len < 64)) { + char buffer[64]; + MEMCPY(buffer, start, char, len); + buffer[len] = '\0'; + return DBL2NUM(rb_cstr_to_dbl(buffer, 1)); + } else { + return json_decode_large_float(start, len); + } +} + +static inline VALUE json_decode_array(JSON_ParserState *state, JSON_ParserConfig *config, long count) +{ + VALUE array = rb_ary_new_from_values(count, rvalue_stack_peek(state->stack, count)); + rvalue_stack_pop(state->stack, count); + + if (config->freeze) { + RB_OBJ_FREEZE(array); + } + + return array; +} + +static VALUE json_find_duplicated_key(size_t count, const VALUE *pairs) +{ + VALUE set = rb_hash_new_capa(count / 2); + for (size_t index = 0; index < count; index += 2) { + size_t before = RHASH_SIZE(set); + VALUE key = pairs[index]; + rb_hash_aset(set, key, Qtrue); + if (RHASH_SIZE(set) == before) { + if (RB_SYMBOL_P(key)) { + return rb_sym2str(key); + } + return key; + } + } + return Qfalse; +} + +static void emit_duplicate_key_warning(JSON_ParserState *state, VALUE duplicate_key) +{ + VALUE message = rb_sprintf( + "detected duplicate key %"PRIsVALUE" in JSON object. 
This will raise an error in json 3.0 unless enabled via `allow_duplicate_key: true`", + rb_inspect(duplicate_key) + ); + + emit_parse_warning(RSTRING_PTR(message), state); + RB_GC_GUARD(message); +} + +#ifdef RBIMPL_ATTR_NORETURN +RBIMPL_ATTR_NORETURN() +#endif +static void raise_duplicate_key_error(JSON_ParserState *state, VALUE duplicate_key) +{ + VALUE message = rb_sprintf( + "duplicate key %"PRIsVALUE, + rb_inspect(duplicate_key) + ); + + raise_parse_error(RSTRING_PTR(message), state); + RB_GC_GUARD(message); +} + +static inline VALUE json_decode_object(JSON_ParserState *state, JSON_ParserConfig *config, size_t count) +{ + size_t entries_count = count / 2; + VALUE object = rb_hash_new_capa(entries_count); + const VALUE *pairs = rvalue_stack_peek(state->stack, count); + rb_hash_bulk_insert(count, pairs, object); + + if (RB_UNLIKELY(RHASH_SIZE(object) < entries_count)) { + switch (config->on_duplicate_key) { + case JSON_IGNORE: + break; + case JSON_DEPRECATED: + emit_duplicate_key_warning(state, json_find_duplicated_key(count, pairs)); + break; + case JSON_RAISE: + raise_duplicate_key_error(state, json_find_duplicated_key(count, pairs)); + break; + } + } + + rvalue_stack_pop(state->stack, count); + + if (config->freeze) { + RB_OBJ_FREEZE(object); + } + + return object; +} + +static inline VALUE json_decode_string(JSON_ParserState *state, JSON_ParserConfig *config, const char *start, const char *end, bool escaped, bool is_name) +{ + VALUE string; + bool intern = is_name || config->freeze; + bool symbolize = is_name && config->symbolize_names; + if (escaped) { + string = json_string_unescape(state, start, end, is_name, intern, symbolize); + } else { + string = json_string_fastpath(state, start, end, is_name, intern, symbolize); + } + + return string; +} + +static inline VALUE json_push_value(JSON_ParserState *state, JSON_ParserConfig *config, VALUE value) +{ + if (RB_UNLIKELY(config->on_load_proc)) { + value = rb_proc_call_with_block(config->on_load_proc, 1, &value, Qnil); + } + rvalue_stack_push(state->stack, value, &state->stack_handle, &state->stack); + return value; +} + +static const bool string_scan_table[256] = { + // ASCII Control Characters + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, + // ASCII Characters + 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // '"' + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, // '\\' + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, +}; + +#if (defined(__GNUC__ ) || defined(__clang__)) +#define FORCE_INLINE __attribute__((always_inline)) +#else +#define FORCE_INLINE +#endif + +#ifdef HAVE_SIMD +static SIMD_Implementation simd_impl = SIMD_NONE; +#endif /* HAVE_SIMD */ + +static inline bool FORCE_INLINE string_scan(JSON_ParserState *state) +{ +#ifdef HAVE_SIMD +#if defined(HAVE_SIMD_NEON) + + uint64_t mask = 0; + if (string_scan_simd_neon(&state->cursor, state->end, &mask)) { + state->cursor += trailing_zeros64(mask) >> 2; + return 1; + } + +#elif defined(HAVE_SIMD_SSE2) + if (simd_impl == SIMD_SSE2) { + int mask = 0; + if (string_scan_simd_sse2(&state->cursor, state->end, &mask)) { + state->cursor += trailing_zeros(mask); + return 1; + } + } +#endif /* HAVE_SIMD_NEON or HAVE_SIMD_SSE2 */ +#endif /* HAVE_SIMD */ + + while (state->cursor < state->end) { + if (RB_UNLIKELY(string_scan_table[(unsigned char)*state->cursor])) { + return 1; + } + 
*state->cursor++; + } + return 0; +} + +static inline VALUE json_parse_string(JSON_ParserState *state, JSON_ParserConfig *config, bool is_name) +{ + state->cursor++; + const char *start = state->cursor; + bool escaped = false; + + while (RB_UNLIKELY(string_scan(state))) { + switch (*state->cursor) { + case '"': { + VALUE string = json_decode_string(state, config, start, state->cursor, escaped, is_name); + state->cursor++; + return json_push_value(state, config, string); + } + case '\\': { + state->cursor++; + escaped = true; + if ((unsigned char)*state->cursor < 0x20) { + raise_parse_error("invalid ASCII control character in string: %s", state); + } + break; + } + default: + raise_parse_error("invalid ASCII control character in string: %s", state); + break; + } + + state->cursor++; + } + + raise_parse_error("unexpected end of input, expected closing \"", state); + return Qfalse; +} + +static VALUE json_parse_any(JSON_ParserState *state, JSON_ParserConfig *config) +{ + json_eat_whitespace(state); + if (state->cursor >= state->end) { + raise_parse_error("unexpected end of input", state); + } + + switch (*state->cursor) { + case 'n': + if ((state->end - state->cursor >= 4) && (memcmp(state->cursor, "null", 4) == 0)) { + state->cursor += 4; + return json_push_value(state, config, Qnil); + } + + raise_parse_error("unexpected token %s", state); + break; + case 't': + if ((state->end - state->cursor >= 4) && (memcmp(state->cursor, "true", 4) == 0)) { + state->cursor += 4; + return json_push_value(state, config, Qtrue); + } + + raise_parse_error("unexpected token %s", state); + break; + case 'f': + // Note: memcmp with a small power of two compile to an integer comparison + if ((state->end - state->cursor >= 5) && (memcmp(state->cursor + 1, "alse", 4) == 0)) { + state->cursor += 5; + return json_push_value(state, config, Qfalse); + } + + raise_parse_error("unexpected token %s", state); + break; + case 'N': + // Note: memcmp with a small power of two compile to an integer comparison + if (config->allow_nan && (state->end - state->cursor >= 3) && (memcmp(state->cursor + 1, "aN", 2) == 0)) { + state->cursor += 3; + return json_push_value(state, config, CNaN); + } + + raise_parse_error("unexpected token %s", state); + break; + case 'I': + if (config->allow_nan && (state->end - state->cursor >= 8) && (memcmp(state->cursor, "Infinity", 8) == 0)) { + state->cursor += 8; + return json_push_value(state, config, CInfinity); + } + + raise_parse_error("unexpected token %s", state); + break; + case '-': + // Note: memcmp with a small power of two compile to an integer comparison + if ((state->end - state->cursor >= 9) && (memcmp(state->cursor + 1, "Infinity", 8) == 0)) { + if (config->allow_nan) { + state->cursor += 9; + return json_push_value(state, config, CMinusInfinity); + } else { + raise_parse_error("unexpected token %s", state); + } + } + // Fallthrough + case '0': case '1': case '2': case '3': case '4': case '5': case '6': case '7': case '8': case '9': { + bool integer = true; + + // /\A-?(0|[1-9]\d*)(\.\d+)?([Ee][-+]?\d+)?/ + const char *start = state->cursor; + state->cursor++; + + while ((state->cursor < state->end) && (*state->cursor >= '0') && (*state->cursor <= '9')) { + state->cursor++; + } + + long integer_length = state->cursor - start; + + if (RB_UNLIKELY(start[0] == '0' && integer_length > 1)) { + raise_parse_error_at("invalid number: %s", state, start); + } else if (RB_UNLIKELY(integer_length > 2 && start[0] == '-' && start[1] == '0')) { + raise_parse_error_at("invalid number: %s", state, 
start); + } else if (RB_UNLIKELY(integer_length == 1 && start[0] == '-')) { + raise_parse_error_at("invalid number: %s", state, start); + } + + if ((state->cursor < state->end) && (*state->cursor == '.')) { + integer = false; + state->cursor++; + + if (state->cursor == state->end || *state->cursor < '0' || *state->cursor > '9') { + raise_parse_error("invalid number: %s", state); + } + + while ((state->cursor < state->end) && (*state->cursor >= '0') && (*state->cursor <= '9')) { + state->cursor++; + } + } + + if ((state->cursor < state->end) && ((*state->cursor == 'e') || (*state->cursor == 'E'))) { + integer = false; + state->cursor++; + if ((state->cursor < state->end) && ((*state->cursor == '+') || (*state->cursor == '-'))) { + state->cursor++; + } + + if (state->cursor == state->end || *state->cursor < '0' || *state->cursor > '9') { + raise_parse_error("invalid number: %s", state); + } + + while ((state->cursor < state->end) && (*state->cursor >= '0') && (*state->cursor <= '9')) { + state->cursor++; + } + } + + if (integer) { + return json_push_value(state, config, json_decode_integer(start, state->cursor)); + } + return json_push_value(state, config, json_decode_float(config, start, state->cursor)); + } + case '"': { + // %r{\A"[^"\\\t\n\x00]*(?:\\[bfnrtu\\/"][^"\\]*)*"} + return json_parse_string(state, config, false); + break; + } + case '[': { + state->cursor++; + json_eat_whitespace(state); + long stack_head = state->stack->head; + + if ((state->cursor < state->end) && (*state->cursor == ']')) { + state->cursor++; + return json_push_value(state, config, json_decode_array(state, config, 0)); + } else { + state->current_nesting++; + if (RB_UNLIKELY(config->max_nesting && (config->max_nesting < state->current_nesting))) { + rb_raise(eNestingError, "nesting of %d is too deep", state->current_nesting); + } + state->in_array++; + json_parse_any(state, config); + } + + while (true) { + json_eat_whitespace(state); + + if (state->cursor < state->end) { + if (*state->cursor == ']') { + state->cursor++; + long count = state->stack->head - stack_head; + state->current_nesting--; + state->in_array--; + return json_push_value(state, config, json_decode_array(state, config, count)); + } + + if (*state->cursor == ',') { + state->cursor++; + if (config->allow_trailing_comma) { + json_eat_whitespace(state); + if ((state->cursor < state->end) && (*state->cursor == ']')) { + continue; + } + } + json_parse_any(state, config); + continue; + } + } + + raise_parse_error("expected ',' or ']' after array value", state); + } + break; + } + case '{': { + const char *object_start_cursor = state->cursor; + + state->cursor++; + json_eat_whitespace(state); + long stack_head = state->stack->head; + + if ((state->cursor < state->end) && (*state->cursor == '}')) { + state->cursor++; + return json_push_value(state, config, json_decode_object(state, config, 0)); + } else { + state->current_nesting++; + if (RB_UNLIKELY(config->max_nesting && (config->max_nesting < state->current_nesting))) { + rb_raise(eNestingError, "nesting of %d is too deep", state->current_nesting); + } + + if (*state->cursor != '"') { + raise_parse_error("expected object key, got %s", state); + } + json_parse_string(state, config, true); + + json_eat_whitespace(state); + if ((state->cursor >= state->end) || (*state->cursor != ':')) { + raise_parse_error("expected ':' after object key", state); + } + state->cursor++; + + json_parse_any(state, config); + } + + while (true) { + json_eat_whitespace(state); + + if (state->cursor < state->end) { + if 
(*state->cursor == '}') { + state->cursor++; + state->current_nesting--; + size_t count = state->stack->head - stack_head; + + // Temporary rewind cursor in case an error is raised + const char *final_cursor = state->cursor; + state->cursor = object_start_cursor; + VALUE object = json_decode_object(state, config, count); + state->cursor = final_cursor; + + return json_push_value(state, config, object); + } + + if (*state->cursor == ',') { + state->cursor++; + json_eat_whitespace(state); + + if (config->allow_trailing_comma) { + if ((state->cursor < state->end) && (*state->cursor == '}')) { + continue; + } + } + + if (*state->cursor != '"') { + raise_parse_error("expected object key, got: %s", state); + } + json_parse_string(state, config, true); + + json_eat_whitespace(state); + if ((state->cursor >= state->end) || (*state->cursor != ':')) { + raise_parse_error("expected ':' after object key, got: %s", state); + } + state->cursor++; + + json_parse_any(state, config); + + continue; + } + } + + raise_parse_error("expected ',' or '}' after object value, got: %s", state); + } + break; + } + + default: + raise_parse_error("unexpected character: %s", state); + break; + } + + raise_parse_error("unreacheable: %s", state); +} + +static void json_ensure_eof(JSON_ParserState *state) +{ + json_eat_whitespace(state); + if (state->cursor != state->end) { + raise_parse_error("unexpected token at end of stream %s", state); + } +} + +/* + * Document-class: JSON::Ext::Parser + * + * This is the JSON parser implemented as a C extension. It can be configured + * to be used by setting + * + * JSON.parser = JSON::Ext::Parser + * + * with the method parser= in JSON. + * + */ + +static VALUE convert_encoding(VALUE source) +{ + int encindex = RB_ENCODING_GET(source); + + if (RB_LIKELY(encindex == utf8_encindex)) { + return source; + } + + if (encindex == binary_encindex) { + // For historical reason, we silently reinterpret binary strings as UTF-8 + return rb_enc_associate_index(rb_str_dup(source), utf8_encindex); + } + + return rb_funcall(source, i_encode, 1, Encoding_UTF_8); +} + +static int parser_config_init_i(VALUE key, VALUE val, VALUE data) +{ + JSON_ParserConfig *config = (JSON_ParserConfig *)data; + + if (key == sym_max_nesting) { config->max_nesting = RTEST(val) ? FIX2INT(val) : 0; } + else if (key == sym_allow_nan) { config->allow_nan = RTEST(val); } + else if (key == sym_allow_trailing_comma) { config->allow_trailing_comma = RTEST(val); } + else if (key == sym_symbolize_names) { config->symbolize_names = RTEST(val); } + else if (key == sym_freeze) { config->freeze = RTEST(val); } + else if (key == sym_on_load) { config->on_load_proc = RTEST(val) ? val : Qfalse; } + else if (key == sym_allow_duplicate_key) { config->on_duplicate_key = RTEST(val) ? 
JSON_IGNORE : JSON_RAISE; } + else if (key == sym_decimal_class) { + if (RTEST(val)) { + if (rb_respond_to(val, i_try_convert)) { + config->decimal_class = val; + config->decimal_method_id = i_try_convert; + } else if (rb_respond_to(val, i_new)) { + config->decimal_class = val; + config->decimal_method_id = i_new; + } else if (RB_TYPE_P(val, T_CLASS)) { + VALUE name = rb_class_name(val); + const char *name_cstr = RSTRING_PTR(name); + const char *last_colon = strrchr(name_cstr, ':'); + if (last_colon) { + const char *mod_path_end = last_colon - 1; + VALUE mod_path = rb_str_substr(name, 0, mod_path_end - name_cstr); + config->decimal_class = rb_path_to_class(mod_path); + + const char *method_name_beg = last_colon + 1; + long before_len = method_name_beg - name_cstr; + long len = RSTRING_LEN(name) - before_len; + VALUE method_name = rb_str_substr(name, before_len, len); + config->decimal_method_id = SYM2ID(rb_str_intern(method_name)); + } else { + config->decimal_class = rb_mKernel; + config->decimal_method_id = SYM2ID(rb_str_intern(name)); + } + } + } + } + + return ST_CONTINUE; +} + +static void parser_config_init(JSON_ParserConfig *config, VALUE opts) +{ + config->max_nesting = 100; + + if (!NIL_P(opts)) { + Check_Type(opts, T_HASH); + if (RHASH_SIZE(opts) > 0) { + // We assume in most cases few keys are set so it's faster to go over + // the provided keys than to check all possible keys. + rb_hash_foreach(opts, parser_config_init_i, (VALUE)config); + } + + } +} + +/* + * call-seq: new(opts => {}) + * + * Creates a new JSON::Ext::ParserConfig instance. + * + * It will be configured by the _opts_ hash. _opts_ can have the following + * keys: + * + * _opts_ can have the following keys: + * * *max_nesting*: The maximum depth of nesting allowed in the parsed data + * structures. Disable depth checking with :max_nesting => false|nil|0, it + * defaults to 100. + * * *allow_nan*: If set to true, allow NaN, Infinity and -Infinity in + * defiance of RFC 4627 to be parsed by the Parser. This option defaults to + * false. + * * *symbolize_names*: If set to true, returns symbols for the names + * (keys) in a JSON object. Otherwise strings are returned, which is + * also the default. It's not possible to use this option in + * conjunction with the *create_additions* option. + * * *decimal_class*: Specifies which class to use instead of the default + * (Float) when parsing decimal numbers. This class must accept a single + * string argument in its constructor. + */ +static VALUE cParserConfig_initialize(VALUE self, VALUE opts) +{ + GET_PARSER_CONFIG; + + parser_config_init(config, opts); + + RB_OBJ_WRITTEN(self, Qundef, config->decimal_class); + + return self; +} + +static VALUE cParser_parse(JSON_ParserConfig *config, VALUE Vsource) +{ + Vsource = convert_encoding(StringValue(Vsource)); + StringValue(Vsource); + + VALUE rvalue_stack_buffer[RVALUE_STACK_INITIAL_CAPA]; + rvalue_stack stack = { + .type = RVALUE_STACK_STACK_ALLOCATED, + .ptr = rvalue_stack_buffer, + .capa = RVALUE_STACK_INITIAL_CAPA, + }; + + long len; + const char *start; + RSTRING_GETMEM(Vsource, start, len); + + JSON_ParserState _state = { + .start = start, + .cursor = start, + .end = start + len, + .stack = &stack, + }; + JSON_ParserState *state = &_state; + + VALUE result = json_parse_any(state, config); + + // This may be skipped in case of exception, but + // it won't cause a leak. 
+ rvalue_stack_eagerly_release(state->stack_handle); + + json_ensure_eof(state); + + return result; +} + +/* + * call-seq: parse(source) + * + * Parses the current JSON text _source_ and returns the complete data + * structure as a result. + * It raises JSON::ParserError if fail to parse. + */ +static VALUE cParserConfig_parse(VALUE self, VALUE Vsource) +{ + GET_PARSER_CONFIG; + return cParser_parse(config, Vsource); +} + +static VALUE cParser_m_parse(VALUE klass, VALUE Vsource, VALUE opts) +{ + Vsource = convert_encoding(StringValue(Vsource)); + StringValue(Vsource); + + JSON_ParserConfig _config = {0}; + JSON_ParserConfig *config = &_config; + parser_config_init(config, opts); + + return cParser_parse(config, Vsource); +} + +static void JSON_ParserConfig_mark(void *ptr) +{ + JSON_ParserConfig *config = ptr; + rb_gc_mark(config->on_load_proc); + rb_gc_mark(config->decimal_class); +} + +static void JSON_ParserConfig_free(void *ptr) +{ + JSON_ParserConfig *config = ptr; + ruby_xfree(config); +} + +static size_t JSON_ParserConfig_memsize(const void *ptr) +{ + return sizeof(JSON_ParserConfig); +} + +static const rb_data_type_t JSON_ParserConfig_type = { + "JSON::Ext::Parser/ParserConfig", + { + JSON_ParserConfig_mark, + JSON_ParserConfig_free, + JSON_ParserConfig_memsize, + }, + 0, 0, + RUBY_TYPED_FREE_IMMEDIATELY | RUBY_TYPED_WB_PROTECTED, +}; + +static VALUE cJSON_parser_s_allocate(VALUE klass) +{ + JSON_ParserConfig *config; + return TypedData_Make_Struct(klass, JSON_ParserConfig, &JSON_ParserConfig_type, config); +} + +void Init_parser(void) +{ +#ifdef HAVE_RB_EXT_RACTOR_SAFE + rb_ext_ractor_safe(true); +#endif + +#undef rb_intern + rb_require("json/common"); + mJSON = rb_define_module("JSON"); + VALUE mExt = rb_define_module_under(mJSON, "Ext"); + VALUE cParserConfig = rb_define_class_under(mExt, "ParserConfig", rb_cObject); + eNestingError = rb_path2class("JSON::NestingError"); + rb_gc_register_mark_object(eNestingError); + rb_define_alloc_func(cParserConfig, cJSON_parser_s_allocate); + rb_define_method(cParserConfig, "initialize", cParserConfig_initialize, 1); + rb_define_method(cParserConfig, "parse", cParserConfig_parse, 1); + + VALUE cParser = rb_define_class_under(mExt, "Parser", rb_cObject); + rb_define_singleton_method(cParser, "parse", cParser_m_parse, 2); + + CNaN = rb_const_get(mJSON, rb_intern("NaN")); + rb_gc_register_mark_object(CNaN); + + CInfinity = rb_const_get(mJSON, rb_intern("Infinity")); + rb_gc_register_mark_object(CInfinity); + + CMinusInfinity = rb_const_get(mJSON, rb_intern("MinusInfinity")); + rb_gc_register_mark_object(CMinusInfinity); + + rb_global_variable(&Encoding_UTF_8); + Encoding_UTF_8 = rb_const_get(rb_path2class("Encoding"), rb_intern("UTF_8")); + + sym_max_nesting = ID2SYM(rb_intern("max_nesting")); + sym_allow_nan = ID2SYM(rb_intern("allow_nan")); + sym_allow_trailing_comma = ID2SYM(rb_intern("allow_trailing_comma")); + sym_symbolize_names = ID2SYM(rb_intern("symbolize_names")); + sym_freeze = ID2SYM(rb_intern("freeze")); + sym_on_load = ID2SYM(rb_intern("on_load")); + sym_decimal_class = ID2SYM(rb_intern("decimal_class")); + sym_allow_duplicate_key = ID2SYM(rb_intern("allow_duplicate_key")); + + i_chr = rb_intern("chr"); + i_aset = rb_intern("[]="); + i_aref = rb_intern("[]"); + i_leftshift = rb_intern("<<"); + i_new = rb_intern("new"); + i_try_convert = rb_intern("try_convert"); + i_uminus = rb_intern("-@"); + i_encode = rb_intern("encode"); + + binary_encindex = rb_ascii8bit_encindex(); + utf8_encindex = rb_utf8_encindex(); + enc_utf8 = 
rb_utf8_encoding(); + +#ifdef HAVE_SIMD + simd_impl = find_simd_implementation(); +#endif +} diff --git a/vendor/bundle/ruby/3.2.0/gems/json-2.13.2/ext/json/ext/simd/conf.rb b/vendor/bundle/ruby/3.2.0/gems/json-2.13.2/ext/json/ext/simd/conf.rb new file mode 100644 index 000000000..76f774bc9 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/json-2.13.2/ext/json/ext/simd/conf.rb @@ -0,0 +1,24 @@ +case RbConfig::CONFIG['host_cpu'] +when /^(arm|aarch64)/ + # Try to compile a small program using NEON instructions + header, type, init, extra = 'arm_neon.h', 'uint8x16_t', 'vdupq_n_u8(32)', nil +when /^(x86_64|x64)/ + header, type, init, extra = 'x86intrin.h', '__m128i', '_mm_set1_epi8(32)', 'if (__builtin_cpu_supports("sse2")) { printf("OK"); }' +end +if header + if have_header(header) && try_compile(<<~SRC, '-Werror=implicit-function-declaration') + #{cpp_include(header)} + int main(int argc, char **argv) { + #{type} test = #{init}; + #{extra} + if (argc > 100000) printf("%p", &test); + return 0; + } + SRC + $defs.push("-DJSON_ENABLE_SIMD") + else + puts "Disable SIMD" + end +end + +have_header('cpuid.h') diff --git a/vendor/bundle/ruby/3.2.0/gems/json-2.13.2/ext/json/ext/simd/simd.h b/vendor/bundle/ruby/3.2.0/gems/json-2.13.2/ext/json/ext/simd/simd.h new file mode 100644 index 000000000..3abbdb020 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/json-2.13.2/ext/json/ext/simd/simd.h @@ -0,0 +1,188 @@ +typedef enum { + SIMD_NONE, + SIMD_NEON, + SIMD_SSE2 +} SIMD_Implementation; + +#ifdef JSON_ENABLE_SIMD + +#ifdef __clang__ +# if __has_builtin(__builtin_ctzll) +# define HAVE_BUILTIN_CTZLL 1 +# else +# define HAVE_BUILTIN_CTZLL 0 +# endif +#elif defined(__GNUC__) && (__GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ >= 3)) +# define HAVE_BUILTIN_CTZLL 1 +#else +# define HAVE_BUILTIN_CTZLL 0 +#endif + +static inline uint32_t trailing_zeros64(uint64_t input) +{ +#if HAVE_BUILTIN_CTZLL + return __builtin_ctzll(input); +#else + uint32_t trailing_zeros = 0; + uint64_t temp = input; + while ((temp & 1) == 0 && temp > 0) { + trailing_zeros++; + temp >>= 1; + } + return trailing_zeros; +#endif +} + +static inline int trailing_zeros(int input) +{ +#if HAVE_BUILTIN_CTZLL + return __builtin_ctz(input); +#else + int trailing_zeros = 0; + int temp = input; + while ((temp & 1) == 0 && temp > 0) { + trailing_zeros++; + temp >>= 1; + } + return trailing_zeros; +#endif +} + +#if (defined(__GNUC__ ) || defined(__clang__)) +#define FORCE_INLINE __attribute__((always_inline)) +#else +#define FORCE_INLINE +#endif + + +#define SIMD_MINIMUM_THRESHOLD 6 + +#if defined(__ARM_NEON) || defined(__ARM_NEON__) || defined(__aarch64__) || defined(_M_ARM64) +#include + +#define FIND_SIMD_IMPLEMENTATION_DEFINED 1 +static inline SIMD_Implementation find_simd_implementation(void) +{ + return SIMD_NEON; +} + +#define HAVE_SIMD 1 +#define HAVE_SIMD_NEON 1 + +// See: https://community.arm.com/arm-community-blogs/b/servers-and-cloud-computing-blog/posts/porting-x86-vector-bitmask-optimizations-to-arm-neon +static inline FORCE_INLINE uint64_t neon_match_mask(uint8x16_t matches) +{ + const uint8x8_t res = vshrn_n_u16(vreinterpretq_u16_u8(matches), 4); + const uint64_t mask = vget_lane_u64(vreinterpret_u64_u8(res), 0); + return mask & 0x8888888888888888ull; +} + +static inline FORCE_INLINE uint64_t compute_chunk_mask_neon(const char *ptr) +{ + uint8x16_t chunk = vld1q_u8((const unsigned char *)ptr); + + // Trick: c < 32 || c == 34 can be factored as c ^ 2 < 33 + // 
https://lemire.me/blog/2025/04/13/detect-control-characters-quotes-and-backslashes-efficiently-using-swar/ + const uint8x16_t too_low_or_dbl_quote = vcltq_u8(veorq_u8(chunk, vdupq_n_u8(2)), vdupq_n_u8(33)); + + uint8x16_t has_backslash = vceqq_u8(chunk, vdupq_n_u8('\\')); + uint8x16_t needs_escape = vorrq_u8(too_low_or_dbl_quote, has_backslash); + return neon_match_mask(needs_escape); +} + +static inline FORCE_INLINE int string_scan_simd_neon(const char **ptr, const char *end, uint64_t *mask) +{ + while (*ptr + sizeof(uint8x16_t) <= end) { + uint64_t chunk_mask = compute_chunk_mask_neon(*ptr); + if (chunk_mask) { + *mask = chunk_mask; + return 1; + } + *ptr += sizeof(uint8x16_t); + } + return 0; +} + +static inline uint8x16x4_t load_uint8x16_4(const unsigned char *table) +{ + uint8x16x4_t tab; + tab.val[0] = vld1q_u8(table); + tab.val[1] = vld1q_u8(table+16); + tab.val[2] = vld1q_u8(table+32); + tab.val[3] = vld1q_u8(table+48); + return tab; +} + +#endif /* ARM Neon Support.*/ + +#if defined(__amd64__) || defined(__amd64) || defined(__x86_64__) || defined(__x86_64) || defined(_M_X64) || defined(_M_AMD64) + +#ifdef HAVE_X86INTRIN_H +#include + +#define HAVE_SIMD 1 +#define HAVE_SIMD_SSE2 1 + +#ifdef HAVE_CPUID_H +#define FIND_SIMD_IMPLEMENTATION_DEFINED 1 + +#if defined(__clang__) || defined(__GNUC__) +#define TARGET_SSE2 __attribute__((target("sse2"))) +#else +#define TARGET_SSE2 +#endif + +#define _mm_cmpge_epu8(a, b) _mm_cmpeq_epi8(_mm_max_epu8(a, b), a) +#define _mm_cmple_epu8(a, b) _mm_cmpge_epu8(b, a) +#define _mm_cmpgt_epu8(a, b) _mm_xor_si128(_mm_cmple_epu8(a, b), _mm_set1_epi8(-1)) +#define _mm_cmplt_epu8(a, b) _mm_cmpgt_epu8(b, a) + +static inline TARGET_SSE2 FORCE_INLINE int compute_chunk_mask_sse2(const char *ptr) +{ + __m128i chunk = _mm_loadu_si128((__m128i const*)ptr); + // Trick: c < 32 || c == 34 can be factored as c ^ 2 < 33 + // https://lemire.me/blog/2025/04/13/detect-control-characters-quotes-and-backslashes-efficiently-using-swar/ + __m128i too_low_or_dbl_quote = _mm_cmplt_epu8(_mm_xor_si128(chunk, _mm_set1_epi8(2)), _mm_set1_epi8(33)); + __m128i has_backslash = _mm_cmpeq_epi8(chunk, _mm_set1_epi8('\\')); + __m128i needs_escape = _mm_or_si128(too_low_or_dbl_quote, has_backslash); + return _mm_movemask_epi8(needs_escape); +} + +static inline TARGET_SSE2 FORCE_INLINE int string_scan_simd_sse2(const char **ptr, const char *end, int *mask) +{ + while (*ptr + sizeof(__m128i) <= end) { + int chunk_mask = compute_chunk_mask_sse2(*ptr); + if (chunk_mask) { + *mask = chunk_mask; + return 1; + } + *ptr += sizeof(__m128i); + } + + return 0; +} + +#include +#endif /* HAVE_CPUID_H */ + +static inline SIMD_Implementation find_simd_implementation(void) +{ + // TODO Revisit. I think the SSE version now only uses SSE2 instructions. 
+ if (__builtin_cpu_supports("sse2")) { + return SIMD_SSE2; + } + + return SIMD_NONE; +} + +#endif /* HAVE_X86INTRIN_H */ +#endif /* X86_64 Support */ + +#endif /* JSON_ENABLE_SIMD */ + +#ifndef FIND_SIMD_IMPLEMENTATION_DEFINED +static inline SIMD_Implementation find_simd_implementation(void) +{ + return SIMD_NONE; +} +#endif diff --git a/vendor/bundle/ruby/3.2.0/gems/json-2.13.2/ext/json/ext/vendor/fpconv.c b/vendor/bundle/ruby/3.2.0/gems/json-2.13.2/ext/json/ext/vendor/fpconv.c new file mode 100644 index 000000000..75efd46f1 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/json-2.13.2/ext/json/ext/vendor/fpconv.c @@ -0,0 +1,479 @@ +// Boost Software License - Version 1.0 - August 17th, 2003 +// +// Permission is hereby granted, free of charge, to any person or organization +// obtaining a copy of the software and accompanying documentation covered by +// this license (the "Software") to use, reproduce, display, distribute, +// execute, and transmit the Software, and to prepare derivative works of the +// Software, and to permit third-parties to whom the Software is furnished to +// do so, all subject to the following: +// +// The copyright notices in the Software and this entire statement, including +// the above license grant, this restriction and the following disclaimer, +// must be included in all copies of the Software, in whole or in part, and +// all derivative works of the Software, unless such copies or derivative +// works are solely in the form of machine-executable object code generated by +// a source language processor. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE, TITLE AND NON-INFRINGEMENT. IN NO EVENT +// SHALL THE COPYRIGHT HOLDERS OR ANYONE DISTRIBUTING THE SOFTWARE BE LIABLE +// FOR ANY DAMAGES OR OTHER LIABILITY, WHETHER IN CONTRACT, TORT OR OTHERWISE, +// ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +// DEALINGS IN THE SOFTWARE. + +// The contents of this file is extracted from https://github.com/night-shift/fpconv +// It was slightly modified to append ".0" to plain floats, for use with the https://github.com/ruby/json package. 
+ +#include +#include +#include + +#define npowers 87 +#define steppowers 8 +#define firstpower -348 /* 10 ^ -348 */ + +#define expmax -32 +#define expmin -60 + +typedef struct Fp { + uint64_t frac; + int exp; +} Fp; + +static const Fp powers_ten[] = { + { 18054884314459144840U, -1220 }, { 13451937075301367670U, -1193 }, + { 10022474136428063862U, -1166 }, { 14934650266808366570U, -1140 }, + { 11127181549972568877U, -1113 }, { 16580792590934885855U, -1087 }, + { 12353653155963782858U, -1060 }, { 18408377700990114895U, -1034 }, + { 13715310171984221708U, -1007 }, { 10218702384817765436U, -980 }, + { 15227053142812498563U, -954 }, { 11345038669416679861U, -927 }, + { 16905424996341287883U, -901 }, { 12595523146049147757U, -874 }, + { 9384396036005875287U, -847 }, { 13983839803942852151U, -821 }, + { 10418772551374772303U, -794 }, { 15525180923007089351U, -768 }, + { 11567161174868858868U, -741 }, { 17236413322193710309U, -715 }, + { 12842128665889583758U, -688 }, { 9568131466127621947U, -661 }, + { 14257626930069360058U, -635 }, { 10622759856335341974U, -608 }, + { 15829145694278690180U, -582 }, { 11793632577567316726U, -555 }, + { 17573882009934360870U, -529 }, { 13093562431584567480U, -502 }, + { 9755464219737475723U, -475 }, { 14536774485912137811U, -449 }, + { 10830740992659433045U, -422 }, { 16139061738043178685U, -396 }, + { 12024538023802026127U, -369 }, { 17917957937422433684U, -343 }, + { 13349918974505688015U, -316 }, { 9946464728195732843U, -289 }, + { 14821387422376473014U, -263 }, { 11042794154864902060U, -236 }, + { 16455045573212060422U, -210 }, { 12259964326927110867U, -183 }, + { 18268770466636286478U, -157 }, { 13611294676837538539U, -130 }, + { 10141204801825835212U, -103 }, { 15111572745182864684U, -77 }, + { 11258999068426240000U, -50 }, { 16777216000000000000U, -24 }, + { 12500000000000000000U, 3 }, { 9313225746154785156U, 30 }, + { 13877787807814456755U, 56 }, { 10339757656912845936U, 83 }, + { 15407439555097886824U, 109 }, { 11479437019748901445U, 136 }, + { 17105694144590052135U, 162 }, { 12744735289059618216U, 189 }, + { 9495567745759798747U, 216 }, { 14149498560666738074U, 242 }, + { 10542197943230523224U, 269 }, { 15709099088952724970U, 295 }, + { 11704190886730495818U, 322 }, { 17440603504673385349U, 348 }, + { 12994262207056124023U, 375 }, { 9681479787123295682U, 402 }, + { 14426529090290212157U, 428 }, { 10748601772107342003U, 455 }, + { 16016664761464807395U, 481 }, { 11933345169920330789U, 508 }, + { 17782069995880619868U, 534 }, { 13248674568444952270U, 561 }, + { 9871031767461413346U, 588 }, { 14708983551653345445U, 614 }, + { 10959046745042015199U, 641 }, { 16330252207878254650U, 667 }, + { 12166986024289022870U, 694 }, { 18130221999122236476U, 720 }, + { 13508068024458167312U, 747 }, { 10064294952495520794U, 774 }, + { 14996968138956309548U, 800 }, { 11173611982879273257U, 827 }, + { 16649979327439178909U, 853 }, { 12405201291620119593U, 880 }, + { 9242595204427927429U, 907 }, { 13772540099066387757U, 933 }, + { 10261342003245940623U, 960 }, { 15290591125556738113U, 986 }, + { 11392378155556871081U, 1013 }, { 16975966327722178521U, 1039 }, + { 12648080533535911531U, 1066 } +}; + +static Fp find_cachedpow10(int exp, int* k) +{ + const double one_log_ten = 0.30102999566398114; + + int approx = (int)(-(exp + npowers) * one_log_ten); + int idx = (approx - firstpower) / steppowers; + + while(1) { + int current = exp + powers_ten[idx].exp + 64; + + if(current < expmin) { + idx++; + continue; + } + + if(current > expmax) { + idx--; + continue; + } + + *k = 
(firstpower + idx * steppowers); + + return powers_ten[idx]; + } +} + +#define fracmask 0x000FFFFFFFFFFFFFU +#define expmask 0x7FF0000000000000U +#define hiddenbit 0x0010000000000000U +#define signmask 0x8000000000000000U +#define expbias (1023 + 52) + +#define absv(n) ((n) < 0 ? -(n) : (n)) +#define minv(a, b) ((a) < (b) ? (a) : (b)) + +static const uint64_t tens[] = { + 10000000000000000000U, 1000000000000000000U, 100000000000000000U, + 10000000000000000U, 1000000000000000U, 100000000000000U, + 10000000000000U, 1000000000000U, 100000000000U, + 10000000000U, 1000000000U, 100000000U, + 10000000U, 1000000U, 100000U, + 10000U, 1000U, 100U, + 10U, 1U +}; + +static inline uint64_t get_dbits(double d) +{ + union { + double dbl; + uint64_t i; + } dbl_bits = { d }; + + return dbl_bits.i; +} + +static Fp build_fp(double d) +{ + uint64_t bits = get_dbits(d); + + Fp fp; + fp.frac = bits & fracmask; + fp.exp = (bits & expmask) >> 52; + + if(fp.exp) { + fp.frac += hiddenbit; + fp.exp -= expbias; + + } else { + fp.exp = -expbias + 1; + } + + return fp; +} + +static void normalize(Fp* fp) +{ + while ((fp->frac & hiddenbit) == 0) { + fp->frac <<= 1; + fp->exp--; + } + + int shift = 64 - 52 - 1; + fp->frac <<= shift; + fp->exp -= shift; +} + +static void get_normalized_boundaries(Fp* fp, Fp* lower, Fp* upper) +{ + upper->frac = (fp->frac << 1) + 1; + upper->exp = fp->exp - 1; + + while ((upper->frac & (hiddenbit << 1)) == 0) { + upper->frac <<= 1; + upper->exp--; + } + + int u_shift = 64 - 52 - 2; + + upper->frac <<= u_shift; + upper->exp = upper->exp - u_shift; + + + int l_shift = fp->frac == hiddenbit ? 2 : 1; + + lower->frac = (fp->frac << l_shift) - 1; + lower->exp = fp->exp - l_shift; + + + lower->frac <<= lower->exp - upper->exp; + lower->exp = upper->exp; +} + +static Fp multiply(Fp* a, Fp* b) +{ + const uint64_t lomask = 0x00000000FFFFFFFF; + + uint64_t ah_bl = (a->frac >> 32) * (b->frac & lomask); + uint64_t al_bh = (a->frac & lomask) * (b->frac >> 32); + uint64_t al_bl = (a->frac & lomask) * (b->frac & lomask); + uint64_t ah_bh = (a->frac >> 32) * (b->frac >> 32); + + uint64_t tmp = (ah_bl & lomask) + (al_bh & lomask) + (al_bl >> 32); + /* round up */ + tmp += 1U << 31; + + Fp fp = { + ah_bh + (ah_bl >> 32) + (al_bh >> 32) + (tmp >> 32), + a->exp + b->exp + 64 + }; + + return fp; +} + +static void round_digit(char* digits, int ndigits, uint64_t delta, uint64_t rem, uint64_t kappa, uint64_t frac) +{ + while (rem < frac && delta - rem >= kappa && + (rem + kappa < frac || frac - rem > rem + kappa - frac)) { + + digits[ndigits - 1]--; + rem += kappa; + } +} + +static int generate_digits(Fp* fp, Fp* upper, Fp* lower, char* digits, int* K) +{ + uint64_t wfrac = upper->frac - fp->frac; + uint64_t delta = upper->frac - lower->frac; + + Fp one; + one.frac = 1ULL << -upper->exp; + one.exp = upper->exp; + + uint64_t part1 = upper->frac >> -one.exp; + uint64_t part2 = upper->frac & (one.frac - 1); + + int idx = 0, kappa = 10; + const uint64_t* divp; + /* 1000000000 */ + for(divp = tens + 10; kappa > 0; divp++) { + + uint64_t div = *divp; + unsigned digit = (unsigned) (part1 / div); + + if (digit || idx) { + digits[idx++] = digit + '0'; + } + + part1 -= digit * div; + kappa--; + + uint64_t tmp = (part1 <<-one.exp) + part2; + if (tmp <= delta) { + *K += kappa; + round_digit(digits, idx, delta, tmp, div << -one.exp, wfrac); + + return idx; + } + } + + /* 10 */ + const uint64_t* unit = tens + 18; + + while(true) { + part2 *= 10; + delta *= 10; + kappa--; + + unsigned digit = (unsigned) (part2 >> -one.exp); + if 
(digit || idx) { + digits[idx++] = digit + '0'; + } + + part2 &= one.frac - 1; + if (part2 < delta) { + *K += kappa; + round_digit(digits, idx, delta, part2, one.frac, wfrac * *unit); + + return idx; + } + + unit--; + } +} + +static int grisu2(double d, char* digits, int* K) +{ + Fp w = build_fp(d); + + Fp lower, upper; + get_normalized_boundaries(&w, &lower, &upper); + + normalize(&w); + + int k; + Fp cp = find_cachedpow10(upper.exp, &k); + + w = multiply(&w, &cp); + upper = multiply(&upper, &cp); + lower = multiply(&lower, &cp); + + lower.frac++; + upper.frac--; + + *K = -k; + + return generate_digits(&w, &upper, &lower, digits, K); +} + +static int emit_digits(char* digits, int ndigits, char* dest, int K, bool neg) +{ + int exp = absv(K + ndigits - 1); + + int max_trailing_zeros = 7; + + if(neg) { + max_trailing_zeros -= 1; + } + + /* write plain integer */ + if(K >= 0 && (exp < (ndigits + max_trailing_zeros))) { + + memcpy(dest, digits, ndigits); + memset(dest + ndigits, '0', K); + + /* add a .0 to mark this as a float. */ + dest[ndigits + K] = '.'; + dest[ndigits + K + 1] = '0'; + + return ndigits + K + 2; + } + + /* write decimal w/o scientific notation */ + if(K < 0 && (K > -7 || exp < 10)) { + int offset = ndigits - absv(K); + /* fp < 1.0 -> write leading zero */ + if(offset <= 0) { + offset = -offset; + dest[0] = '0'; + dest[1] = '.'; + memset(dest + 2, '0', offset); + memcpy(dest + offset + 2, digits, ndigits); + + return ndigits + 2 + offset; + + /* fp > 1.0 */ + } else { + memcpy(dest, digits, offset); + dest[offset] = '.'; + memcpy(dest + offset + 1, digits + offset, ndigits - offset); + + return ndigits + 1; + } + } + + /* write decimal w/ scientific notation */ + ndigits = minv(ndigits, 18 - neg); + + int idx = 0; + dest[idx++] = digits[0]; + + if(ndigits > 1) { + dest[idx++] = '.'; + memcpy(dest + idx, digits + 1, ndigits - 1); + idx += ndigits - 1; + } + + dest[idx++] = 'e'; + + char sign = K + ndigits - 1 < 0 ? '-' : '+'; + dest[idx++] = sign; + + int cent = 0; + + if(exp > 99) { + cent = exp / 100; + dest[idx++] = cent + '0'; + exp -= cent * 100; + } + if(exp > 9) { + int dec = exp / 10; + dest[idx++] = dec + '0'; + exp -= dec * 10; + + } else if(cent) { + dest[idx++] = '0'; + } + + dest[idx++] = exp % 10 + '0'; + + return idx; +} + +static int filter_special(double fp, char* dest) +{ + if(fp == 0.0) { + dest[0] = '0'; + dest[1] = '.'; + dest[2] = '0'; + return 3; + } + + uint64_t bits = get_dbits(fp); + + bool nan = (bits & expmask) == expmask; + + if(!nan) { + return 0; + } + + if(bits & fracmask) { + dest[0] = 'n'; dest[1] = 'a'; dest[2] = 'n'; + + } else { + dest[0] = 'i'; dest[1] = 'n'; dest[2] = 'f'; + } + + return 3; +} + +/* Fast and accurate double to string conversion based on Florian Loitsch's + * Grisu-algorithm[1]. + * + * Input: + * fp -> the double to convert, dest -> destination buffer. + * The generated string will never be longer than 28 characters. + * Make sure to pass a pointer to at least 28 bytes of memory. + * The emitted string will not be null terminated. + * + * Output: + * The number of written characters. 
+ * + * Exemplary usage: + * + * void print(double d) + * { + * char buf[28 + 1] // plus null terminator + * int str_len = fpconv_dtoa(d, buf); + * + * buf[str_len] = '\0'; + * printf("%s", buf); + * } + * + */ +static int fpconv_dtoa(double d, char dest[28]) +{ + char digits[18]; + + int str_len = 0; + bool neg = false; + + if(get_dbits(d) & signmask) { + dest[0] = '-'; + str_len++; + neg = true; + } + + int spec = filter_special(d, dest + str_len); + + if(spec) { + return str_len + spec; + } + + int K = 0; + int ndigits = grisu2(d, digits, &K); + + str_len += emit_digits(digits, ndigits, dest + str_len, K, neg); + + return str_len; +} diff --git a/vendor/bundle/ruby/3.2.0/gems/json-2.13.2/ext/json/ext/vendor/jeaiii-ltoa.h b/vendor/bundle/ruby/3.2.0/gems/json-2.13.2/ext/json/ext/vendor/jeaiii-ltoa.h new file mode 100644 index 000000000..ba4f497fc --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/json-2.13.2/ext/json/ext/vendor/jeaiii-ltoa.h @@ -0,0 +1,267 @@ +/* + +This file is released under the terms of the MIT License. It is based on the +work of James Edward Anhalt III, with the original license listed below. + +MIT License + +Copyright (c) 2024,2025 Enrico Thierbach - https://github.com/radiospiel +Copyright (c) 2022 James Edward Anhalt III - https://github.com/jeaiii/itoa + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
+*/ + +#ifndef JEAIII_TO_TEXT_H_ +#define JEAIII_TO_TEXT_H_ + +#include + +typedef uint_fast32_t u32_t; +typedef uint_fast64_t u64_t; + +#define u32(x) ((u32_t)(x)) +#define u64(x) ((u64_t)(x)) + +struct digit_pair +{ + char dd[2]; +}; + +static const struct digit_pair *digits_dd = (struct digit_pair *)( + "00" "01" "02" "03" "04" "05" "06" "07" "08" "09" + "10" "11" "12" "13" "14" "15" "16" "17" "18" "19" + "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" + "30" "31" "32" "33" "34" "35" "36" "37" "38" "39" + "40" "41" "42" "43" "44" "45" "46" "47" "48" "49" + "50" "51" "52" "53" "54" "55" "56" "57" "58" "59" + "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" + "70" "71" "72" "73" "74" "75" "76" "77" "78" "79" + "80" "81" "82" "83" "84" "85" "86" "87" "88" "89" + "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" +); + +static const struct digit_pair *digits_fd = (struct digit_pair *)( + "0_" "1_" "2_" "3_" "4_" "5_" "6_" "7_" "8_" "9_" + "10" "11" "12" "13" "14" "15" "16" "17" "18" "19" + "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" + "30" "31" "32" "33" "34" "35" "36" "37" "38" "39" + "40" "41" "42" "43" "44" "45" "46" "47" "48" "49" + "50" "51" "52" "53" "54" "55" "56" "57" "58" "59" + "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" + "70" "71" "72" "73" "74" "75" "76" "77" "78" "79" + "80" "81" "82" "83" "84" "85" "86" "87" "88" "89" + "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" +); + +static const u64_t mask24 = (u64(1) << 24) - 1; +static const u64_t mask32 = (u64(1) << 32) - 1; +static const u64_t mask57 = (u64(1) << 57) - 1; + +#define COPY(buffer, digits) memcpy(buffer, &(digits), sizeof(struct digit_pair)) + +static char * +jeaiii_ultoa(char *b, u64_t n) +{ + if (n < u32(1e2)) { + COPY(b, digits_fd[n]); + return n < 10 ? b + 1 : b + 2; + } + + if (n < u32(1e6)) { + if (n < u32(1e4)) { + u32_t f0 = u32((10 * (1 << 24) / 1e3 + 1) * n); + COPY(b, digits_fd[f0 >> 24]); + + b -= n < u32(1e3); + u32_t f2 = (f0 & mask24) * 100; + COPY(b + 2, digits_dd[f2 >> 24]); + + return b + 4; + } + + u64_t f0 = u64(10 * (1ull << 32ull)/ 1e5 + 1) * n; + COPY(b, digits_fd[f0 >> 32]); + + b -= n < u32(1e5); + u64_t f2 = (f0 & mask32) * 100; + COPY(b + 2, digits_dd[f2 >> 32]); + + u64_t f4 = (f2 & mask32) * 100; + COPY(b + 4, digits_dd[f4 >> 32]); + return b + 6; + } + + if (n < u64(1ull << 32ull)) { + if (n < u32(1e8)) { + u64_t f0 = u64(10 * (1ull << 48ull) / 1e7 + 1) * n >> 16; + COPY(b, digits_fd[f0 >> 32]); + + b -= n < u32(1e7); + u64_t f2 = (f0 & mask32) * 100; + COPY(b + 2, digits_dd[f2 >> 32]); + + u64_t f4 = (f2 & mask32) * 100; + COPY(b + 4, digits_dd[f4 >> 32]); + + u64_t f6 = (f4 & mask32) * 100; + COPY(b + 6, digits_dd[f6 >> 32]); + + return b + 8; + } + + u64_t f0 = u64(10 * (1ull << 57ull) / 1e9 + 1) * n; + COPY(b, digits_fd[f0 >> 57]); + + b -= n < u32(1e9); + u64_t f2 = (f0 & mask57) * 100; + COPY(b + 2, digits_dd[f2 >> 57]); + + u64_t f4 = (f2 & mask57) * 100; + COPY(b + 4, digits_dd[f4 >> 57]); + + u64_t f6 = (f4 & mask57) * 100; + COPY(b + 6, digits_dd[f6 >> 57]); + + u64_t f8 = (f6 & mask57) * 100; + COPY(b + 8, digits_dd[f8 >> 57]); + + return b + 10; + } + + // if we get here U must be u64 but some compilers don't know that, so reassign n to a u64 to avoid warnings + u32_t z = n % u32(1e8); + u64_t u = n / u32(1e8); + + if (u < u32(1e2)) { + // u can't be 1 digit (if u < 10 it would have been handled above as a 9 digit 32bit number) + COPY(b, digits_dd[u]); + b += 2; + } + else if (u < u32(1e6)) { + if (u < u32(1e4)) { + u32_t f0 = u32((10 * (1 << 24) / 1e3 + 1) * 
u); + COPY(b, digits_fd[f0 >> 24]); + + b -= u < u32(1e3); + u32_t f2 = (f0 & mask24) * 100; + COPY(b + 2, digits_dd[f2 >> 24]); + b += 4; + } + else { + u64_t f0 = u64(10 * (1ull << 32ull) / 1e5 + 1) * u; + COPY(b, digits_fd[f0 >> 32]); + + b -= u < u32(1e5); + u64_t f2 = (f0 & mask32) * 100; + COPY(b + 2, digits_dd[f2 >> 32]); + + u64_t f4 = (f2 & mask32) * 100; + COPY(b + 4, digits_dd[f4 >> 32]); + b += 6; + } + } + else if (u < u32(1e8)) { + u64_t f0 = u64(10 * (1ull << 48ull) / 1e7 + 1) * u >> 16; + COPY(b, digits_fd[f0 >> 32]); + + b -= u < u32(1e7); + u64_t f2 = (f0 & mask32) * 100; + COPY(b + 2, digits_dd[f2 >> 32]); + + u64_t f4 = (f2 & mask32) * 100; + COPY(b + 4, digits_dd[f4 >> 32]); + + u64_t f6 = (f4 & mask32) * 100; + COPY(b + 6, digits_dd[f6 >> 32]); + + b += 8; + } + else if (u < u64(1ull << 32ull)) { + u64_t f0 = u64(10 * (1ull << 57ull) / 1e9 + 1) * u; + COPY(b, digits_fd[f0 >> 57]); + + b -= u < u32(1e9); + u64_t f2 = (f0 & mask57) * 100; + COPY(b + 2, digits_dd[f2 >> 57]); + + u64_t f4 = (f2 & mask57) * 100; + COPY(b + 4, digits_dd[f4 >> 57]); + + u64_t f6 = (f4 & mask57) * 100; + COPY(b + 6, digits_dd[f6 >> 57]); + + u64_t f8 = (f6 & mask57) * 100; + COPY(b + 8, digits_dd[f8 >> 57]); + b += 10; + } + else { + u32_t y = u % u32(1e8); + u /= u32(1e8); + + // u is 2, 3, or 4 digits (if u < 10 it would have been handled above) + if (u < u32(1e2)) { + COPY(b, digits_dd[u]); + b += 2; + } + else { + u32_t f0 = u32((10 * (1 << 24) / 1e3 + 1) * u); + COPY(b, digits_fd[f0 >> 24]); + + b -= u < u32(1e3); + u32_t f2 = (f0 & mask24) * 100; + COPY(b + 2, digits_dd[f2 >> 24]); + + b += 4; + } + // do 8 digits + u64_t f0 = (u64((1ull << 48ull) / 1e6 + 1) * y >> 16) + 1; + COPY(b, digits_dd[f0 >> 32]); + + u64_t f2 = (f0 & mask32) * 100; + COPY(b + 2, digits_dd[f2 >> 32]); + + u64_t f4 = (f2 & mask32) * 100; + COPY(b + 4, digits_dd[f4 >> 32]); + + u64_t f6 = (f4 & mask32) * 100; + COPY(b + 6, digits_dd[f6 >> 32]); + b += 8; + } + + // do 8 digits + u64_t f0 = (u64((1ull << 48ull) / 1e6 + 1) * z >> 16) + 1; + COPY(b, digits_dd[f0 >> 32]); + + u64_t f2 = (f0 & mask32) * 100; + COPY(b + 2, digits_dd[f2 >> 32]); + + u64_t f4 = (f2 & mask32) * 100; + COPY(b + 4, digits_dd[f4 >> 32]); + + u64_t f6 = (f4 & mask32) * 100; + COPY(b + 6, digits_dd[f6 >> 32]); + + return b + 8; +} + +#undef u32 +#undef u64 +#undef COPY + +#endif // JEAIII_TO_TEXT_H_ diff --git a/vendor/bundle/ruby/3.2.0/gems/json-2.13.2/json.gemspec b/vendor/bundle/ruby/3.2.0/gems/json-2.13.2/json.gemspec new file mode 100644 index 000000000..557573102 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/json-2.13.2/json.gemspec @@ -0,0 +1,62 @@ +# frozen_string_literal: true + +version = File.foreach(File.join(__dir__, "lib/json/version.rb")) do |line| + /^\s*VERSION\s*=\s*'(.*)'/ =~ line and break $1 +end rescue nil + +spec = Gem::Specification.new do |s| + java_ext = Gem::Platform === s.platform && s.platform =~ 'java' || RUBY_ENGINE == 'jruby' + + s.name = "json" + s.version = version + + s.summary = "JSON Implementation for Ruby" + s.homepage = "https://github.com/ruby/json" + s.metadata = { + 'bug_tracker_uri' => 'https://github.com/ruby/json/issues', + 'changelog_uri' => 'https://github.com/ruby/json/blob/master/CHANGES.md', + 'documentation_uri' => 'https://docs.ruby-lang.org/en/master/JSON.html', + 'homepage_uri' => s.homepage, + 'source_code_uri' => 'https://github.com/ruby/json', + } + + s.required_ruby_version = Gem::Requirement.new(">= 2.7") + + if java_ext + s.description = "A JSON implementation as a JRuby 
extension." + s.author = "Daniel Luz" + s.email = "dev+ruby@mernen.com" + else + s.description = "This is a JSON implementation as a Ruby extension in C." + s.authors = ["Florian Frank"] + s.email = "flori@ping.de" + end + + s.licenses = ["Ruby"] + + s.extra_rdoc_files = ["README.md"] + s.rdoc_options = ["--title", "JSON implementation for Ruby", "--main", "README.md"] + + s.files = [ + "CHANGES.md", + "COPYING", + "BSDL", + "LEGAL", + "README.md", + "json.gemspec", + ] + Dir.glob("lib/**/*.rb", base: File.expand_path("..", __FILE__)) + + if java_ext + s.platform = 'java' + s.files += Dir["lib/json/ext/**/*.jar"] + else + s.extensions = Dir["ext/json/**/extconf.rb"] + s.files += Dir["ext/json/**/*.{c,h,rb}"] + end +end + +if RUBY_ENGINE == 'jruby' && $0 == __FILE__ + Gem::Builder.new(spec).build +else + spec +end diff --git a/vendor/bundle/ruby/3.2.0/gems/json-2.13.2/lib/json.rb b/vendor/bundle/ruby/3.2.0/gems/json-2.13.2/lib/json.rb new file mode 100644 index 000000000..735f23806 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/json-2.13.2/lib/json.rb @@ -0,0 +1,620 @@ +# frozen_string_literal: true +require 'json/common' + +## +# = JavaScript \Object Notation (\JSON) +# +# \JSON is a lightweight data-interchange format. +# +# A \JSON value is one of the following: +# - Double-quoted text: "foo". +# - Number: +1+, +1.0+, +2.0e2+. +# - Boolean: +true+, +false+. +# - Null: +null+. +# - \Array: an ordered list of values, enclosed by square brackets: +# ["foo", 1, 1.0, 2.0e2, true, false, null] +# +# - \Object: a collection of name/value pairs, enclosed by curly braces; +# each name is double-quoted text; +# the values may be any \JSON values: +# {"a": "foo", "b": 1, "c": 1.0, "d": 2.0e2, "e": true, "f": false, "g": null} +# +# A \JSON array or object may contain nested arrays, objects, and scalars +# to any depth: +# {"foo": {"bar": 1, "baz": 2}, "bat": [0, 1, 2]} +# [{"foo": 0, "bar": 1}, ["baz", 2]] +# +# == Using \Module \JSON +# +# To make module \JSON available in your code, begin with: +# require 'json' +# +# All examples here assume that this has been done. +# +# === Parsing \JSON +# +# You can parse a \String containing \JSON data using +# either of two methods: +# - JSON.parse(source, opts) +# - JSON.parse!(source, opts) +# +# where +# - +source+ is a Ruby object. +# - +opts+ is a \Hash object containing options +# that control both input allowed and output formatting. +# +# The difference between the two methods +# is that JSON.parse! omits some checks +# and may not be safe for some +source+ data; +# use it only for data from trusted sources. +# Use the safer method JSON.parse for less trusted sources. 
+# +# ==== Parsing \JSON Arrays +# +# When +source+ is a \JSON array, JSON.parse by default returns a Ruby \Array: +# json = '["foo", 1, 1.0, 2.0e2, true, false, null]' +# ruby = JSON.parse(json) +# ruby # => ["foo", 1, 1.0, 200.0, true, false, nil] +# ruby.class # => Array +# +# The \JSON array may contain nested arrays, objects, and scalars +# to any depth: +# json = '[{"foo": 0, "bar": 1}, ["baz", 2]]' +# JSON.parse(json) # => [{"foo"=>0, "bar"=>1}, ["baz", 2]] +# +# ==== Parsing \JSON \Objects +# +# When the source is a \JSON object, JSON.parse by default returns a Ruby \Hash: +# json = '{"a": "foo", "b": 1, "c": 1.0, "d": 2.0e2, "e": true, "f": false, "g": null}' +# ruby = JSON.parse(json) +# ruby # => {"a"=>"foo", "b"=>1, "c"=>1.0, "d"=>200.0, "e"=>true, "f"=>false, "g"=>nil} +# ruby.class # => Hash +# +# The \JSON object may contain nested arrays, objects, and scalars +# to any depth: +# json = '{"foo": {"bar": 1, "baz": 2}, "bat": [0, 1, 2]}' +# JSON.parse(json) # => {"foo"=>{"bar"=>1, "baz"=>2}, "bat"=>[0, 1, 2]} +# +# ==== Parsing \JSON Scalars +# +# When the source is a \JSON scalar (not an array or object), +# JSON.parse returns a Ruby scalar. +# +# \String: +# ruby = JSON.parse('"foo"') +# ruby # => 'foo' +# ruby.class # => String +# \Integer: +# ruby = JSON.parse('1') +# ruby # => 1 +# ruby.class # => Integer +# \Float: +# ruby = JSON.parse('1.0') +# ruby # => 1.0 +# ruby.class # => Float +# ruby = JSON.parse('2.0e2') +# ruby # => 200 +# ruby.class # => Float +# Boolean: +# ruby = JSON.parse('true') +# ruby # => true +# ruby.class # => TrueClass +# ruby = JSON.parse('false') +# ruby # => false +# ruby.class # => FalseClass +# Null: +# ruby = JSON.parse('null') +# ruby # => nil +# ruby.class # => NilClass +# +# ==== Parsing Options +# +# ====== Input Options +# +# Option +max_nesting+ (\Integer) specifies the maximum nesting depth allowed; +# defaults to +100+; specify +false+ to disable depth checking. +# +# With the default, +false+: +# source = '[0, [1, [2, [3]]]]' +# ruby = JSON.parse(source) +# ruby # => [0, [1, [2, [3]]]] +# Too deep: +# # Raises JSON::NestingError (nesting of 2 is too deep): +# JSON.parse(source, {max_nesting: 1}) +# Bad value: +# # Raises TypeError (wrong argument type Symbol (expected Fixnum)): +# JSON.parse(source, {max_nesting: :foo}) +# +# --- +# +# Option +allow_duplicate_key+ specifies whether duplicate keys in objects +# should be ignored or cause an error to be raised: +# +# When not specified: +# # The last value is used and a deprecation warning emitted. +# JSON.parse('{"a": 1, "a":2}') => {"a" => 2} +# # waring: detected duplicate keys in JSON object. +# # This will raise an error in json 3.0 unless enabled via `allow_duplicate_key: true` +# +# When set to `+true+` +# # The last value is used. +# JSON.parse('{"a": 1, "a":2}') => {"a" => 2} +# +# When set to `+false+`, the future default: +# JSON.parse('{"a": 1, "a":2}') => duplicate key at line 1 column 1 (JSON::ParserError) +# +# --- +# +# Option +allow_nan+ (boolean) specifies whether to allow +# NaN, Infinity, and MinusInfinity in +source+; +# defaults to +false+. 
+# +# With the default, +false+: +# # Raises JSON::ParserError (225: unexpected token at '[NaN]'): +# JSON.parse('[NaN]') +# # Raises JSON::ParserError (232: unexpected token at '[Infinity]'): +# JSON.parse('[Infinity]') +# # Raises JSON::ParserError (248: unexpected token at '[-Infinity]'): +# JSON.parse('[-Infinity]') +# Allow: +# source = '[NaN, Infinity, -Infinity]' +# ruby = JSON.parse(source, {allow_nan: true}) +# ruby # => [NaN, Infinity, -Infinity] +# +# --- +# +# Option +allow_trailing_comma+ (boolean) specifies whether to allow +# trailing commas in objects and arrays; +# defaults to +false+. +# +# With the default, +false+: +# JSON.parse('[1,]') # unexpected character: ']' at line 1 column 4 (JSON::ParserError) +# +# When enabled: +# JSON.parse('[1,]', allow_trailing_comma: true) # => [1] +# +# ====== Output Options +# +# Option +freeze+ (boolean) specifies whether the returned objects will be frozen; +# defaults to +false+. +# +# Option +symbolize_names+ (boolean) specifies whether returned \Hash keys +# should be Symbols; +# defaults to +false+ (use Strings). +# +# With the default, +false+: +# source = '{"a": "foo", "b": 1.0, "c": true, "d": false, "e": null}' +# ruby = JSON.parse(source) +# ruby # => {"a"=>"foo", "b"=>1.0, "c"=>true, "d"=>false, "e"=>nil} +# Use Symbols: +# ruby = JSON.parse(source, {symbolize_names: true}) +# ruby # => {:a=>"foo", :b=>1.0, :c=>true, :d=>false, :e=>nil} +# +# --- +# +# Option +object_class+ (\Class) specifies the Ruby class to be used +# for each \JSON object; +# defaults to \Hash. +# +# With the default, \Hash: +# source = '{"a": "foo", "b": 1.0, "c": true, "d": false, "e": null}' +# ruby = JSON.parse(source) +# ruby.class # => Hash +# Use class \OpenStruct: +# ruby = JSON.parse(source, {object_class: OpenStruct}) +# ruby # => # +# +# --- +# +# Option +array_class+ (\Class) specifies the Ruby class to be used +# for each \JSON array; +# defaults to \Array. +# +# With the default, \Array: +# source = '["foo", 1.0, true, false, null]' +# ruby = JSON.parse(source) +# ruby.class # => Array +# Use class \Set: +# ruby = JSON.parse(source, {array_class: Set}) +# ruby # => # +# +# --- +# +# Option +create_additions+ (boolean) specifies whether to use \JSON additions in parsing. +# See {\JSON Additions}[#module-JSON-label-JSON+Additions]. +# +# === Generating \JSON +# +# To generate a Ruby \String containing \JSON data, +# use method JSON.generate(source, opts), where +# - +source+ is a Ruby object. +# - +opts+ is a \Hash object containing options +# that control both input allowed and output formatting. 
+# +# ==== Generating \JSON from Arrays +# +# When the source is a Ruby \Array, JSON.generate returns +# a \String containing a \JSON array: +# ruby = [0, 's', :foo] +# json = JSON.generate(ruby) +# json # => '[0,"s","foo"]' +# +# The Ruby \Array array may contain nested arrays, hashes, and scalars +# to any depth: +# ruby = [0, [1, 2], {foo: 3, bar: 4}] +# json = JSON.generate(ruby) +# json # => '[0,[1,2],{"foo":3,"bar":4}]' +# +# ==== Generating \JSON from Hashes +# +# When the source is a Ruby \Hash, JSON.generate returns +# a \String containing a \JSON object: +# ruby = {foo: 0, bar: 's', baz: :bat} +# json = JSON.generate(ruby) +# json # => '{"foo":0,"bar":"s","baz":"bat"}' +# +# The Ruby \Hash array may contain nested arrays, hashes, and scalars +# to any depth: +# ruby = {foo: [0, 1], bar: {baz: 2, bat: 3}, bam: :bad} +# json = JSON.generate(ruby) +# json # => '{"foo":[0,1],"bar":{"baz":2,"bat":3},"bam":"bad"}' +# +# ==== Generating \JSON from Other Objects +# +# When the source is neither an \Array nor a \Hash, +# the generated \JSON data depends on the class of the source. +# +# When the source is a Ruby \Integer or \Float, JSON.generate returns +# a \String containing a \JSON number: +# JSON.generate(42) # => '42' +# JSON.generate(0.42) # => '0.42' +# +# When the source is a Ruby \String, JSON.generate returns +# a \String containing a \JSON string (with double-quotes): +# JSON.generate('A string') # => '"A string"' +# +# When the source is +true+, +false+ or +nil+, JSON.generate returns +# a \String containing the corresponding \JSON token: +# JSON.generate(true) # => 'true' +# JSON.generate(false) # => 'false' +# JSON.generate(nil) # => 'null' +# +# When the source is none of the above, JSON.generate returns +# a \String containing a \JSON string representation of the source: +# JSON.generate(:foo) # => '"foo"' +# JSON.generate(Complex(0, 0)) # => '"0+0i"' +# JSON.generate(Dir.new('.')) # => '"#"' +# +# ==== Generating Options +# +# ====== Input Options +# +# Option +allow_nan+ (boolean) specifies whether +# +NaN+, +Infinity+, and -Infinity may be generated; +# defaults to +false+. +# +# With the default, +false+: +# # Raises JSON::GeneratorError (920: NaN not allowed in JSON): +# JSON.generate(JSON::NaN) +# # Raises JSON::GeneratorError (917: Infinity not allowed in JSON): +# JSON.generate(JSON::Infinity) +# # Raises JSON::GeneratorError (917: -Infinity not allowed in JSON): +# JSON.generate(JSON::MinusInfinity) +# +# Allow: +# ruby = [Float::NaN, Float::Infinity, Float::MinusInfinity] +# JSON.generate(ruby, allow_nan: true) # => '[NaN,Infinity,-Infinity]' +# +# --- +# +# Option +max_nesting+ (\Integer) specifies the maximum nesting depth +# in +obj+; defaults to +100+. +# +# With the default, +100+: +# obj = [[[[[[0]]]]]] +# JSON.generate(obj) # => '[[[[[[0]]]]]]' +# +# Too deep: +# # Raises JSON::NestingError (nesting of 2 is too deep): +# JSON.generate(obj, max_nesting: 2) +# +# ====== Escaping Options +# +# Options +script_safe+ (boolean) specifies wether '\u2028', '\u2029' +# and '/' should be escaped as to make the JSON object safe to interpolate in script +# tags. +# +# Options +ascii_only+ (boolean) specifies wether all characters outside the ASCII range +# should be escaped. +# +# ====== Output Options +# +# The default formatting options generate the most compact +# \JSON data, all on one line and with no whitespace. +# +# You can use these formatting options to generate +# \JSON data in a more open format, using whitespace. +# See also JSON.pretty_generate. 
+# +# - Option +array_nl+ (\String) specifies a string (usually a newline) +# to be inserted after each \JSON array; defaults to the empty \String, ''. +# - Option +object_nl+ (\String) specifies a string (usually a newline) +# to be inserted after each \JSON object; defaults to the empty \String, ''. +# - Option +indent+ (\String) specifies the string (usually spaces) to be +# used for indentation; defaults to the empty \String, ''; +# defaults to the empty \String, ''; +# has no effect unless options +array_nl+ or +object_nl+ specify newlines. +# - Option +space+ (\String) specifies a string (usually a space) to be +# inserted after the colon in each \JSON object's pair; +# defaults to the empty \String, ''. +# - Option +space_before+ (\String) specifies a string (usually a space) to be +# inserted before the colon in each \JSON object's pair; +# defaults to the empty \String, ''. +# +# In this example, +obj+ is used first to generate the shortest +# \JSON data (no whitespace), then again with all formatting options +# specified: +# +# obj = {foo: [:bar, :baz], bat: {bam: 0, bad: 1}} +# json = JSON.generate(obj) +# puts 'Compact:', json +# opts = { +# array_nl: "\n", +# object_nl: "\n", +# indent: ' ', +# space_before: ' ', +# space: ' ' +# } +# puts 'Open:', JSON.generate(obj, opts) +# +# Output: +# Compact: +# {"foo":["bar","baz"],"bat":{"bam":0,"bad":1}} +# Open: +# { +# "foo" : [ +# "bar", +# "baz" +# ], +# "bat" : { +# "bam" : 0, +# "bad" : 1 +# } +# } +# +# == \JSON Additions +# +# When you "round trip" a non-\String object from Ruby to \JSON and back, +# you have a new \String, instead of the object you began with: +# ruby0 = Range.new(0, 2) +# json = JSON.generate(ruby0) +# json # => '0..2"' +# ruby1 = JSON.parse(json) +# ruby1 # => '0..2' +# ruby1.class # => String +# +# You can use \JSON _additions_ to preserve the original object. +# The addition is an extension of a ruby class, so that: +# - \JSON.generate stores more information in the \JSON string. +# - \JSON.parse, called with option +create_additions+, +# uses that information to create a proper Ruby object. +# +# This example shows a \Range being generated into \JSON +# and parsed back into Ruby, both without and with +# the addition for \Range: +# ruby = Range.new(0, 2) +# # This passage does not use the addition for Range. +# json0 = JSON.generate(ruby) +# ruby0 = JSON.parse(json0) +# # This passage uses the addition for Range. +# require 'json/add/range' +# json1 = JSON.generate(ruby) +# ruby1 = JSON.parse(json1, create_additions: true) +# # Make a nice display. +# display = <<~EOT +# Generated JSON: +# Without addition: #{json0} (#{json0.class}) +# With addition: #{json1} (#{json1.class}) +# Parsed JSON: +# Without addition: #{ruby0.inspect} (#{ruby0.class}) +# With addition: #{ruby1.inspect} (#{ruby1.class}) +# EOT +# puts display +# +# This output shows the different results: +# Generated JSON: +# Without addition: "0..2" (String) +# With addition: {"json_class":"Range","a":[0,2,false]} (String) +# Parsed JSON: +# Without addition: "0..2" (String) +# With addition: 0..2 (Range) +# +# The \JSON module includes additions for certain classes. +# You can also craft custom additions. +# See {Custom \JSON Additions}[#module-JSON-label-Custom+JSON+Additions]. +# +# === Built-in Additions +# +# The \JSON module includes additions for certain classes. 
+# To use an addition, +require+ its source: +# - BigDecimal: require 'json/add/bigdecimal' +# - Complex: require 'json/add/complex' +# - Date: require 'json/add/date' +# - DateTime: require 'json/add/date_time' +# - Exception: require 'json/add/exception' +# - OpenStruct: require 'json/add/ostruct' +# - Range: require 'json/add/range' +# - Rational: require 'json/add/rational' +# - Regexp: require 'json/add/regexp' +# - Set: require 'json/add/set' +# - Struct: require 'json/add/struct' +# - Symbol: require 'json/add/symbol' +# - Time: require 'json/add/time' +# +# To reduce punctuation clutter, the examples below +# show the generated \JSON via +puts+, rather than the usual +inspect+, +# +# \BigDecimal: +# require 'json/add/bigdecimal' +# ruby0 = BigDecimal(0) # 0.0 +# json = JSON.generate(ruby0) # {"json_class":"BigDecimal","b":"27:0.0"} +# ruby1 = JSON.parse(json, create_additions: true) # 0.0 +# ruby1.class # => BigDecimal +# +# \Complex: +# require 'json/add/complex' +# ruby0 = Complex(1+0i) # 1+0i +# json = JSON.generate(ruby0) # {"json_class":"Complex","r":1,"i":0} +# ruby1 = JSON.parse(json, create_additions: true) # 1+0i +# ruby1.class # Complex +# +# \Date: +# require 'json/add/date' +# ruby0 = Date.today # 2020-05-02 +# json = JSON.generate(ruby0) # {"json_class":"Date","y":2020,"m":5,"d":2,"sg":2299161.0} +# ruby1 = JSON.parse(json, create_additions: true) # 2020-05-02 +# ruby1.class # Date +# +# \DateTime: +# require 'json/add/date_time' +# ruby0 = DateTime.now # 2020-05-02T10:38:13-05:00 +# json = JSON.generate(ruby0) # {"json_class":"DateTime","y":2020,"m":5,"d":2,"H":10,"M":38,"S":13,"of":"-5/24","sg":2299161.0} +# ruby1 = JSON.parse(json, create_additions: true) # 2020-05-02T10:38:13-05:00 +# ruby1.class # DateTime +# +# \Exception (and its subclasses including \RuntimeError): +# require 'json/add/exception' +# ruby0 = Exception.new('A message') # A message +# json = JSON.generate(ruby0) # {"json_class":"Exception","m":"A message","b":null} +# ruby1 = JSON.parse(json, create_additions: true) # A message +# ruby1.class # Exception +# ruby0 = RuntimeError.new('Another message') # Another message +# json = JSON.generate(ruby0) # {"json_class":"RuntimeError","m":"Another message","b":null} +# ruby1 = JSON.parse(json, create_additions: true) # Another message +# ruby1.class # RuntimeError +# +# \OpenStruct: +# require 'json/add/ostruct' +# ruby0 = OpenStruct.new(name: 'Matz', language: 'Ruby') # # +# json = JSON.generate(ruby0) # {"json_class":"OpenStruct","t":{"name":"Matz","language":"Ruby"}} +# ruby1 = JSON.parse(json, create_additions: true) # # +# ruby1.class # OpenStruct +# +# \Range: +# require 'json/add/range' +# ruby0 = Range.new(0, 2) # 0..2 +# json = JSON.generate(ruby0) # {"json_class":"Range","a":[0,2,false]} +# ruby1 = JSON.parse(json, create_additions: true) # 0..2 +# ruby1.class # Range +# +# \Rational: +# require 'json/add/rational' +# ruby0 = Rational(1, 3) # 1/3 +# json = JSON.generate(ruby0) # {"json_class":"Rational","n":1,"d":3} +# ruby1 = JSON.parse(json, create_additions: true) # 1/3 +# ruby1.class # Rational +# +# \Regexp: +# require 'json/add/regexp' +# ruby0 = Regexp.new('foo') # (?-mix:foo) +# json = JSON.generate(ruby0) # {"json_class":"Regexp","o":0,"s":"foo"} +# ruby1 = JSON.parse(json, create_additions: true) # (?-mix:foo) +# ruby1.class # Regexp +# +# \Set: +# require 'json/add/set' +# ruby0 = Set.new([0, 1, 2]) # # +# json = JSON.generate(ruby0) # {"json_class":"Set","a":[0,1,2]} +# ruby1 = JSON.parse(json, create_additions: true) # # +# 
ruby1.class # Set +# +# \Struct: +# require 'json/add/struct' +# Customer = Struct.new(:name, :address) # Customer +# ruby0 = Customer.new("Dave", "123 Main") # # +# json = JSON.generate(ruby0) # {"json_class":"Customer","v":["Dave","123 Main"]} +# ruby1 = JSON.parse(json, create_additions: true) # # +# ruby1.class # Customer +# +# \Symbol: +# require 'json/add/symbol' +# ruby0 = :foo # foo +# json = JSON.generate(ruby0) # {"json_class":"Symbol","s":"foo"} +# ruby1 = JSON.parse(json, create_additions: true) # foo +# ruby1.class # Symbol +# +# \Time: +# require 'json/add/time' +# ruby0 = Time.now # 2020-05-02 11:28:26 -0500 +# json = JSON.generate(ruby0) # {"json_class":"Time","s":1588436906,"n":840560000} +# ruby1 = JSON.parse(json, create_additions: true) # 2020-05-02 11:28:26 -0500 +# ruby1.class # Time +# +# +# === Custom \JSON Additions +# +# In addition to the \JSON additions provided, +# you can craft \JSON additions of your own, +# either for Ruby built-in classes or for user-defined classes. +# +# Here's a user-defined class +Foo+: +# class Foo +# attr_accessor :bar, :baz +# def initialize(bar, baz) +# self.bar = bar +# self.baz = baz +# end +# end +# +# Here's the \JSON addition for it: +# # Extend class Foo with JSON addition. +# class Foo +# # Serialize Foo object with its class name and arguments +# def to_json(*args) +# { +# JSON.create_id => self.class.name, +# 'a' => [ bar, baz ] +# }.to_json(*args) +# end +# # Deserialize JSON string by constructing new Foo object with arguments. +# def self.json_create(object) +# new(*object['a']) +# end +# end +# +# Demonstration: +# require 'json' +# # This Foo object has no custom addition. +# foo0 = Foo.new(0, 1) +# json0 = JSON.generate(foo0) +# obj0 = JSON.parse(json0) +# # Lood the custom addition. +# require_relative 'foo_addition' +# # This foo has the custom addition. +# foo1 = Foo.new(0, 1) +# json1 = JSON.generate(foo1) +# obj1 = JSON.parse(json1, create_additions: true) +# # Make a nice display. +# display = <<~EOT +# Generated JSON: +# Without custom addition: #{json0} (#{json0.class}) +# With custom addition: #{json1} (#{json1.class}) +# Parsed JSON: +# Without custom addition: #{obj0.inspect} (#{obj0.class}) +# With custom addition: #{obj1.inspect} (#{obj1.class}) +# EOT +# puts display +# +# Output: +# +# Generated JSON: +# Without custom addition: "#" (String) +# With custom addition: {"json_class":"Foo","a":[0,1]} (String) +# Parsed JSON: +# Without custom addition: "#" (String) +# With custom addition: # (Foo) +# +module JSON + require 'json/version' + require 'json/ext' +end diff --git a/vendor/bundle/ruby/3.2.0/gems/json-2.13.2/lib/json/add/bigdecimal.rb b/vendor/bundle/ruby/3.2.0/gems/json-2.13.2/lib/json/add/bigdecimal.rb new file mode 100644 index 000000000..5dbc12c07 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/json-2.13.2/lib/json/add/bigdecimal.rb @@ -0,0 +1,58 @@ +# frozen_string_literal: true +unless defined?(::JSON::JSON_LOADED) and ::JSON::JSON_LOADED + require 'json' +end +begin + require 'bigdecimal' +rescue LoadError +end + +class BigDecimal + + # See #as_json. + def self.json_create(object) + BigDecimal._load object['b'] + end + + # Methods BigDecimal#as_json and +BigDecimal.json_create+ may be used + # to serialize and deserialize a \BigDecimal object; + # see Marshal[https://docs.ruby-lang.org/en/master/Marshal.html]. 
+ # + # \Method BigDecimal#as_json serializes +self+, + # returning a 2-element hash representing +self+: + # + # require 'json/add/bigdecimal' + # x = BigDecimal(2).as_json # => {"json_class"=>"BigDecimal", "b"=>"27:0.2e1"} + # y = BigDecimal(2.0, 4).as_json # => {"json_class"=>"BigDecimal", "b"=>"36:0.2e1"} + # z = BigDecimal(Complex(2, 0)).as_json # => {"json_class"=>"BigDecimal", "b"=>"27:0.2e1"} + # + # \Method +JSON.create+ deserializes such a hash, returning a \BigDecimal object: + # + # BigDecimal.json_create(x) # => 0.2e1 + # BigDecimal.json_create(y) # => 0.2e1 + # BigDecimal.json_create(z) # => 0.2e1 + # + def as_json(*) + { + JSON.create_id => self.class.name, + 'b' => _dump.force_encoding(Encoding::UTF_8), + } + end + + # Returns a JSON string representing +self+: + # + # require 'json/add/bigdecimal' + # puts BigDecimal(2).to_json + # puts BigDecimal(2.0, 4).to_json + # puts BigDecimal(Complex(2, 0)).to_json + # + # Output: + # + # {"json_class":"BigDecimal","b":"27:0.2e1"} + # {"json_class":"BigDecimal","b":"36:0.2e1"} + # {"json_class":"BigDecimal","b":"27:0.2e1"} + # + def to_json(*args) + as_json.to_json(*args) + end +end if defined?(::BigDecimal) diff --git a/vendor/bundle/ruby/3.2.0/gems/json-2.13.2/lib/json/add/complex.rb b/vendor/bundle/ruby/3.2.0/gems/json-2.13.2/lib/json/add/complex.rb new file mode 100644 index 000000000..a69002eff --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/json-2.13.2/lib/json/add/complex.rb @@ -0,0 +1,51 @@ +# frozen_string_literal: true +unless defined?(::JSON::JSON_LOADED) and ::JSON::JSON_LOADED + require 'json' +end + +class Complex + + # See #as_json. + def self.json_create(object) + Complex(object['r'], object['i']) + end + + # Methods Complex#as_json and +Complex.json_create+ may be used + # to serialize and deserialize a \Complex object; + # see Marshal[https://docs.ruby-lang.org/en/master/Marshal.html]. + # + # \Method Complex#as_json serializes +self+, + # returning a 2-element hash representing +self+: + # + # require 'json/add/complex' + # x = Complex(2).as_json # => {"json_class"=>"Complex", "r"=>2, "i"=>0} + # y = Complex(2.0, 4).as_json # => {"json_class"=>"Complex", "r"=>2.0, "i"=>4} + # + # \Method +JSON.create+ deserializes such a hash, returning a \Complex object: + # + # Complex.json_create(x) # => (2+0i) + # Complex.json_create(y) # => (2.0+4i) + # + def as_json(*) + { + JSON.create_id => self.class.name, + 'r' => real, + 'i' => imag, + } + end + + # Returns a JSON string representing +self+: + # + # require 'json/add/complex' + # puts Complex(2).to_json + # puts Complex(2.0, 4).to_json + # + # Output: + # + # {"json_class":"Complex","r":2,"i":0} + # {"json_class":"Complex","r":2.0,"i":4} + # + def to_json(*args) + as_json.to_json(*args) + end +end diff --git a/vendor/bundle/ruby/3.2.0/gems/json-2.13.2/lib/json/add/core.rb b/vendor/bundle/ruby/3.2.0/gems/json-2.13.2/lib/json/add/core.rb new file mode 100644 index 000000000..485f097ff --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/json-2.13.2/lib/json/add/core.rb @@ -0,0 +1,12 @@ +# frozen_string_literal: true +# This file requires the implementations of ruby core's custom objects for +# serialisation/deserialisation. 
+ +require 'json/add/date' +require 'json/add/date_time' +require 'json/add/exception' +require 'json/add/range' +require 'json/add/regexp' +require 'json/add/struct' +require 'json/add/symbol' +require 'json/add/time' diff --git a/vendor/bundle/ruby/3.2.0/gems/json-2.13.2/lib/json/add/date.rb b/vendor/bundle/ruby/3.2.0/gems/json-2.13.2/lib/json/add/date.rb new file mode 100644 index 000000000..66965d491 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/json-2.13.2/lib/json/add/date.rb @@ -0,0 +1,54 @@ +# frozen_string_literal: true +unless defined?(::JSON::JSON_LOADED) and ::JSON::JSON_LOADED + require 'json' +end +require 'date' + +class Date + + # See #as_json. + def self.json_create(object) + civil(*object.values_at('y', 'm', 'd', 'sg')) + end + + alias start sg unless method_defined?(:start) + + # Methods Date#as_json and +Date.json_create+ may be used + # to serialize and deserialize a \Date object; + # see Marshal[https://docs.ruby-lang.org/en/master/Marshal.html]. + # + # \Method Date#as_json serializes +self+, + # returning a 2-element hash representing +self+: + # + # require 'json/add/date' + # x = Date.today.as_json + # # => {"json_class"=>"Date", "y"=>2023, "m"=>11, "d"=>21, "sg"=>2299161.0} + # + # \Method +JSON.create+ deserializes such a hash, returning a \Date object: + # + # Date.json_create(x) + # # => # + # + def as_json(*) + { + JSON.create_id => self.class.name, + 'y' => year, + 'm' => month, + 'd' => day, + 'sg' => start, + } + end + + # Returns a JSON string representing +self+: + # + # require 'json/add/date' + # puts Date.today.to_json + # + # Output: + # + # {"json_class":"Date","y":2023,"m":11,"d":21,"sg":2299161.0} + # + def to_json(*args) + as_json.to_json(*args) + end +end diff --git a/vendor/bundle/ruby/3.2.0/gems/json-2.13.2/lib/json/add/date_time.rb b/vendor/bundle/ruby/3.2.0/gems/json-2.13.2/lib/json/add/date_time.rb new file mode 100644 index 000000000..569f6ec17 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/json-2.13.2/lib/json/add/date_time.rb @@ -0,0 +1,67 @@ +# frozen_string_literal: true +unless defined?(::JSON::JSON_LOADED) and ::JSON::JSON_LOADED + require 'json' +end +require 'date' + +class DateTime + + # See #as_json. + def self.json_create(object) + args = object.values_at('y', 'm', 'd', 'H', 'M', 'S') + of_a, of_b = object['of'].split('/') + if of_b and of_b != '0' + args << Rational(of_a.to_i, of_b.to_i) + else + args << of_a + end + args << object['sg'] + civil(*args) + end + + alias start sg unless method_defined?(:start) + + # Methods DateTime#as_json and +DateTime.json_create+ may be used + # to serialize and deserialize a \DateTime object; + # see Marshal[https://docs.ruby-lang.org/en/master/Marshal.html]. + # + # \Method DateTime#as_json serializes +self+, + # returning a 2-element hash representing +self+: + # + # require 'json/add/datetime' + # x = DateTime.now.as_json + # # => {"json_class"=>"DateTime", "y"=>2023, "m"=>11, "d"=>21, "sg"=>2299161.0} + # + # \Method +JSON.create+ deserializes such a hash, returning a \DateTime object: + # + # DateTime.json_create(x) # BUG? 
Raises Date::Error "invalid date" + # + def as_json(*) + { + JSON.create_id => self.class.name, + 'y' => year, + 'm' => month, + 'd' => day, + 'H' => hour, + 'M' => min, + 'S' => sec, + 'of' => offset.to_s, + 'sg' => start, + } + end + + # Returns a JSON string representing +self+: + # + # require 'json/add/datetime' + # puts DateTime.now.to_json + # + # Output: + # + # {"json_class":"DateTime","y":2023,"m":11,"d":21,"sg":2299161.0} + # + def to_json(*args) + as_json.to_json(*args) + end +end + + diff --git a/vendor/bundle/ruby/3.2.0/gems/json-2.13.2/lib/json/add/exception.rb b/vendor/bundle/ruby/3.2.0/gems/json-2.13.2/lib/json/add/exception.rb new file mode 100644 index 000000000..5338ff83d --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/json-2.13.2/lib/json/add/exception.rb @@ -0,0 +1,49 @@ +# frozen_string_literal: true +unless defined?(::JSON::JSON_LOADED) and ::JSON::JSON_LOADED + require 'json' +end + +class Exception + + # See #as_json. + def self.json_create(object) + result = new(object['m']) + result.set_backtrace object['b'] + result + end + + # Methods Exception#as_json and +Exception.json_create+ may be used + # to serialize and deserialize a \Exception object; + # see Marshal[https://docs.ruby-lang.org/en/master/Marshal.html]. + # + # \Method Exception#as_json serializes +self+, + # returning a 2-element hash representing +self+: + # + # require 'json/add/exception' + # x = Exception.new('Foo').as_json # => {"json_class"=>"Exception", "m"=>"Foo", "b"=>nil} + # + # \Method +JSON.create+ deserializes such a hash, returning a \Exception object: + # + # Exception.json_create(x) # => # + # + def as_json(*) + { + JSON.create_id => self.class.name, + 'm' => message, + 'b' => backtrace, + } + end + + # Returns a JSON string representing +self+: + # + # require 'json/add/exception' + # puts Exception.new('Foo').to_json + # + # Output: + # + # {"json_class":"Exception","m":"Foo","b":null} + # + def to_json(*args) + as_json.to_json(*args) + end +end diff --git a/vendor/bundle/ruby/3.2.0/gems/json-2.13.2/lib/json/add/ostruct.rb b/vendor/bundle/ruby/3.2.0/gems/json-2.13.2/lib/json/add/ostruct.rb new file mode 100644 index 000000000..534403b78 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/json-2.13.2/lib/json/add/ostruct.rb @@ -0,0 +1,54 @@ +# frozen_string_literal: true +unless defined?(::JSON::JSON_LOADED) and ::JSON::JSON_LOADED + require 'json' +end +begin + require 'ostruct' +rescue LoadError +end + +class OpenStruct + + # See #as_json. + def self.json_create(object) + new(object['t'] || object[:t]) + end + + # Methods OpenStruct#as_json and +OpenStruct.json_create+ may be used + # to serialize and deserialize a \OpenStruct object; + # see Marshal[https://docs.ruby-lang.org/en/master/Marshal.html]. + # + # \Method OpenStruct#as_json serializes +self+, + # returning a 2-element hash representing +self+: + # + # require 'json/add/ostruct' + # x = OpenStruct.new('name' => 'Rowdy', :age => nil).as_json + # # => {"json_class"=>"OpenStruct", "t"=>{:name=>'Rowdy', :age=>nil}} + # + # \Method +JSON.create+ deserializes such a hash, returning a \OpenStruct object: + # + # OpenStruct.json_create(x) + # # => # + # + def as_json(*) + klass = self.class.name + klass.to_s.empty? and raise JSON::JSONError, "Only named structs are supported!" 
+ { + JSON.create_id => klass, + 't' => table, + } + end + + # Returns a JSON string representing +self+: + # + # require 'json/add/ostruct' + # puts OpenStruct.new('name' => 'Rowdy', :age => nil).to_json + # + # Output: + # + # {"json_class":"OpenStruct","t":{'name':'Rowdy',"age":null}} + # + def to_json(*args) + as_json.to_json(*args) + end +end if defined?(::OpenStruct) diff --git a/vendor/bundle/ruby/3.2.0/gems/json-2.13.2/lib/json/add/range.rb b/vendor/bundle/ruby/3.2.0/gems/json-2.13.2/lib/json/add/range.rb new file mode 100644 index 000000000..eb4b29a8e --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/json-2.13.2/lib/json/add/range.rb @@ -0,0 +1,54 @@ +# frozen_string_literal: true +unless defined?(::JSON::JSON_LOADED) and ::JSON::JSON_LOADED + require 'json' +end + +class Range + + # See #as_json. + def self.json_create(object) + new(*object['a']) + end + + # Methods Range#as_json and +Range.json_create+ may be used + # to serialize and deserialize a \Range object; + # see Marshal[https://docs.ruby-lang.org/en/master/Marshal.html]. + # + # \Method Range#as_json serializes +self+, + # returning a 2-element hash representing +self+: + # + # require 'json/add/range' + # x = (1..4).as_json # => {"json_class"=>"Range", "a"=>[1, 4, false]} + # y = (1...4).as_json # => {"json_class"=>"Range", "a"=>[1, 4, true]} + # z = ('a'..'d').as_json # => {"json_class"=>"Range", "a"=>["a", "d", false]} + # + # \Method +JSON.create+ deserializes such a hash, returning a \Range object: + # + # Range.json_create(x) # => 1..4 + # Range.json_create(y) # => 1...4 + # Range.json_create(z) # => "a".."d" + # + def as_json(*) + { + JSON.create_id => self.class.name, + 'a' => [ first, last, exclude_end? ] + } + end + + # Returns a JSON string representing +self+: + # + # require 'json/add/range' + # puts (1..4).to_json + # puts (1...4).to_json + # puts ('a'..'d').to_json + # + # Output: + # + # {"json_class":"Range","a":[1,4,false]} + # {"json_class":"Range","a":[1,4,true]} + # {"json_class":"Range","a":["a","d",false]} + # + def to_json(*args) + as_json.to_json(*args) + end +end diff --git a/vendor/bundle/ruby/3.2.0/gems/json-2.13.2/lib/json/add/rational.rb b/vendor/bundle/ruby/3.2.0/gems/json-2.13.2/lib/json/add/rational.rb new file mode 100644 index 000000000..1eb231474 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/json-2.13.2/lib/json/add/rational.rb @@ -0,0 +1,49 @@ +# frozen_string_literal: true +unless defined?(::JSON::JSON_LOADED) and ::JSON::JSON_LOADED + require 'json' +end + +class Rational + + # See #as_json. + def self.json_create(object) + Rational(object['n'], object['d']) + end + + # Methods Rational#as_json and +Rational.json_create+ may be used + # to serialize and deserialize a \Rational object; + # see Marshal[https://docs.ruby-lang.org/en/master/Marshal.html]. 
+ # + # \Method Rational#as_json serializes +self+, + # returning a 2-element hash representing +self+: + # + # require 'json/add/rational' + # x = Rational(2, 3).as_json + # # => {"json_class"=>"Rational", "n"=>2, "d"=>3} + # + # \Method +JSON.create+ deserializes such a hash, returning a \Rational object: + # + # Rational.json_create(x) + # # => (2/3) + # + def as_json(*) + { + JSON.create_id => self.class.name, + 'n' => numerator, + 'd' => denominator, + } + end + + # Returns a JSON string representing +self+: + # + # require 'json/add/rational' + # puts Rational(2, 3).to_json + # + # Output: + # + # {"json_class":"Rational","n":2,"d":3} + # + def to_json(*args) + as_json.to_json(*args) + end +end diff --git a/vendor/bundle/ruby/3.2.0/gems/json-2.13.2/lib/json/add/regexp.rb b/vendor/bundle/ruby/3.2.0/gems/json-2.13.2/lib/json/add/regexp.rb new file mode 100644 index 000000000..f033dd1de --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/json-2.13.2/lib/json/add/regexp.rb @@ -0,0 +1,48 @@ +# frozen_string_literal: true +unless defined?(::JSON::JSON_LOADED) and ::JSON::JSON_LOADED + require 'json' +end + +class Regexp + + # See #as_json. + def self.json_create(object) + new(object['s'], object['o']) + end + + # Methods Regexp#as_json and +Regexp.json_create+ may be used + # to serialize and deserialize a \Regexp object; + # see Marshal[https://docs.ruby-lang.org/en/master/Marshal.html]. + # + # \Method Regexp#as_json serializes +self+, + # returning a 2-element hash representing +self+: + # + # require 'json/add/regexp' + # x = /foo/.as_json + # # => {"json_class"=>"Regexp", "o"=>0, "s"=>"foo"} + # + # \Method +JSON.create+ deserializes such a hash, returning a \Regexp object: + # + # Regexp.json_create(x) # => /foo/ + # + def as_json(*) + { + JSON.create_id => self.class.name, + 'o' => options, + 's' => source, + } + end + + # Returns a JSON string representing +self+: + # + # require 'json/add/regexp' + # puts /foo/.to_json + # + # Output: + # + # {"json_class":"Regexp","o":0,"s":"foo"} + # + def to_json(*args) + as_json.to_json(*args) + end +end diff --git a/vendor/bundle/ruby/3.2.0/gems/json-2.13.2/lib/json/add/set.rb b/vendor/bundle/ruby/3.2.0/gems/json-2.13.2/lib/json/add/set.rb new file mode 100644 index 000000000..c521d8b90 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/json-2.13.2/lib/json/add/set.rb @@ -0,0 +1,48 @@ +unless defined?(::JSON::JSON_LOADED) and ::JSON::JSON_LOADED + require 'json' +end +defined?(::Set) or require 'set' + +class Set + + # See #as_json. + def self.json_create(object) + new object['a'] + end + + # Methods Set#as_json and +Set.json_create+ may be used + # to serialize and deserialize a \Set object; + # see Marshal[https://docs.ruby-lang.org/en/master/Marshal.html]. 
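# ---- Editorial aside (illustrative only, not part of this diff): the 'o'
# entry above is the Regexp option bitmask, so flags survive the round trip.
# Regexp::IGNORECASE, for instance, is 1:
#
#   require 'json/add/regexp'
#   h = /foo/i.as_json       # => {"json_class"=>"Regexp", "o"=>1, "s"=>"foo"}
#   Regexp.json_create(h)    # => /foo/i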
+ # + # \Method Set#as_json serializes +self+, + # returning a 2-element hash representing +self+: + # + # require 'json/add/set' + # x = Set.new(%w/foo bar baz/).as_json + # # => {"json_class"=>"Set", "a"=>["foo", "bar", "baz"]} + # + # \Method +JSON.create+ deserializes such a hash, returning a \Set object: + # + # Set.json_create(x) # => # + # + def as_json(*) + { + JSON.create_id => self.class.name, + 'a' => to_a, + } + end + + # Returns a JSON string representing +self+: + # + # require 'json/add/set' + # puts Set.new(%w/foo bar baz/).to_json + # + # Output: + # + # {"json_class":"Set","a":["foo","bar","baz"]} + # + def to_json(*args) + as_json.to_json(*args) + end +end + diff --git a/vendor/bundle/ruby/3.2.0/gems/json-2.13.2/lib/json/add/struct.rb b/vendor/bundle/ruby/3.2.0/gems/json-2.13.2/lib/json/add/struct.rb new file mode 100644 index 000000000..98c38d326 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/json-2.13.2/lib/json/add/struct.rb @@ -0,0 +1,52 @@ +# frozen_string_literal: true +unless defined?(::JSON::JSON_LOADED) and ::JSON::JSON_LOADED + require 'json' +end + +class Struct + + # See #as_json. + def self.json_create(object) + new(*object['v']) + end + + # Methods Struct#as_json and +Struct.json_create+ may be used + # to serialize and deserialize a \Struct object; + # see Marshal[https://docs.ruby-lang.org/en/master/Marshal.html]. + # + # \Method Struct#as_json serializes +self+, + # returning a 2-element hash representing +self+: + # + # require 'json/add/struct' + # Customer = Struct.new('Customer', :name, :address, :zip) + # x = Struct::Customer.new.as_json + # # => {"json_class"=>"Struct::Customer", "v"=>[nil, nil, nil]} + # + # \Method +JSON.create+ deserializes such a hash, returning a \Struct object: + # + # Struct::Customer.json_create(x) + # # => # + # + def as_json(*) + klass = self.class.name + klass.to_s.empty? and raise JSON::JSONError, "Only named structs are supported!" + { + JSON.create_id => klass, + 'v' => values, + } + end + + # Returns a JSON string representing +self+: + # + # require 'json/add/struct' + # Customer = Struct.new('Customer', :name, :address, :zip) + # puts Struct::Customer.new.to_json + # + # Output: + # + # {"json_class":"Struct","t":{'name':'Rowdy',"age":null}} + # + def to_json(*args) + as_json.to_json(*args) + end +end diff --git a/vendor/bundle/ruby/3.2.0/gems/json-2.13.2/lib/json/add/symbol.rb b/vendor/bundle/ruby/3.2.0/gems/json-2.13.2/lib/json/add/symbol.rb new file mode 100644 index 000000000..20dd59485 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/json-2.13.2/lib/json/add/symbol.rb @@ -0,0 +1,52 @@ +# frozen_string_literal: true +unless defined?(::JSON::JSON_LOADED) and ::JSON::JSON_LOADED + require 'json' +end + +class Symbol + + # Methods Symbol#as_json and +Symbol.json_create+ may be used + # to serialize and deserialize a \Symbol object; + # see Marshal[https://docs.ruby-lang.org/en/master/Marshal.html]. 
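# ---- Editorial aside (illustrative only, not part of this diff): Struct
# values are serialized positionally in 'v' and json_create splats them back
# into new, so the struct's member order must match what was dumped.
# `Point` below is just a hypothetical example class:
#
#   require 'json/add/struct'
#   Point = Struct.new(:x, :y)
#   h = Point.new(1, 2).as_json   # => {"json_class"=>"Point", "v"=>[1, 2]}
#   Point.json_create(h)          # => #<struct Point x=1, y=2>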
+ # + # \Method Symbol#as_json serializes +self+, + # returning a 2-element hash representing +self+: + # + # require 'json/add/symbol' + # x = :foo.as_json + # # => {"json_class"=>"Symbol", "s"=>"foo"} + # + # \Method +JSON.create+ deserializes such a hash, returning a \Symbol object: + # + # Symbol.json_create(x) # => :foo + # + def as_json(*) + { + JSON.create_id => self.class.name, + 's' => to_s, + } + end + + # Returns a JSON string representing +self+: + # + # require 'json/add/symbol' + # puts :foo.to_json + # + # Output: + # + # # {"json_class":"Symbol","s":"foo"} + # + def to_json(state = nil, *a) + state = ::JSON::State.from_state(state) + if state.strict? + super + else + as_json.to_json(state, *a) + end + end + + # See #as_json. + def self.json_create(o) + o['s'].to_sym + end +end diff --git a/vendor/bundle/ruby/3.2.0/gems/json-2.13.2/lib/json/add/time.rb b/vendor/bundle/ruby/3.2.0/gems/json-2.13.2/lib/json/add/time.rb new file mode 100644 index 000000000..05a1f242f --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/json-2.13.2/lib/json/add/time.rb @@ -0,0 +1,52 @@ +# frozen_string_literal: true +unless defined?(::JSON::JSON_LOADED) and ::JSON::JSON_LOADED + require 'json' +end + +class Time + + # See #as_json. + def self.json_create(object) + if usec = object.delete('u') # used to be tv_usec -> tv_nsec + object['n'] = usec * 1000 + end + at(object['s'], Rational(object['n'], 1000)) + end + + # Methods Time#as_json and +Time.json_create+ may be used + # to serialize and deserialize a \Time object; + # see Marshal[https://docs.ruby-lang.org/en/master/Marshal.html]. + # + # \Method Time#as_json serializes +self+, + # returning a 2-element hash representing +self+: + # + # require 'json/add/time' + # x = Time.now.as_json + # # => {"json_class"=>"Time", "s"=>1700931656, "n"=>472846644} + # + # \Method +JSON.create+ deserializes such a hash, returning a \Time object: + # + # Time.json_create(x) + # # => 2023-11-25 11:00:56.472846644 -0600 + # + def as_json(*) + { + JSON.create_id => self.class.name, + 's' => tv_sec, + 'n' => tv_nsec, + } + end + + # Returns a JSON string representing +self+: + # + # require 'json/add/time' + # puts Time.now.to_json + # + # Output: + # + # {"json_class":"Time","s":1700931678,"n":980650786} + # + def to_json(*args) + as_json.to_json(*args) + end +end diff --git a/vendor/bundle/ruby/3.2.0/gems/json-2.13.2/lib/json/common.rb b/vendor/bundle/ruby/3.2.0/gems/json-2.13.2/lib/json/common.rb new file mode 100644 index 000000000..9a878cead --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/json-2.13.2/lib/json/common.rb @@ -0,0 +1,1105 @@ +# frozen_string_literal: true + +require 'json/version' + +module JSON + autoload :GenericObject, 'json/generic_object' + + module ParserOptions # :nodoc: + class << self + def prepare(opts) + if opts[:object_class] || opts[:array_class] + opts = opts.dup + on_load = opts[:on_load] + + on_load = object_class_proc(opts[:object_class], on_load) if opts[:object_class] + on_load = array_class_proc(opts[:array_class], on_load) if opts[:array_class] + opts[:on_load] = on_load + end + + if opts.fetch(:create_additions, false) != false + opts = create_additions_proc(opts) + end + + opts + end + + private + + def object_class_proc(object_class, on_load) + ->(obj) do + if Hash === obj + object = object_class.new + obj.each { |k, v| object[k] = v } + obj = object + end + on_load.nil? ? 
obj : on_load.call(obj) + end + end + + def array_class_proc(array_class, on_load) + ->(obj) do + if Array === obj + array = array_class.new + obj.each { |v| array << v } + obj = array + end + on_load.nil? ? obj : on_load.call(obj) + end + end + + # TODO: extract :create_additions support to another gem for version 3.0 + def create_additions_proc(opts) + if opts[:symbolize_names] + raise ArgumentError, "options :symbolize_names and :create_additions cannot be used in conjunction" + end + + opts = opts.dup + create_additions = opts.fetch(:create_additions, false) + on_load = opts[:on_load] + object_class = opts[:object_class] || Hash + + opts[:on_load] = ->(object) do + case object + when String + opts[:match_string]&.each do |pattern, klass| + if match = pattern.match(object) + create_additions_warning if create_additions.nil? + object = klass.json_create(object) + break + end + end + when object_class + if opts[:create_additions] != false + if class_name = object[JSON.create_id] + klass = JSON.deep_const_get(class_name) + if (klass.respond_to?(:json_creatable?) && klass.json_creatable?) || klass.respond_to?(:json_create) + create_additions_warning if create_additions.nil? + object = klass.json_create(object) + end + end + end + end + + on_load.nil? ? object : on_load.call(object) + end + + opts + end + + def create_additions_warning + JSON.deprecation_warning "JSON.load implicit support for `create_additions: true` is deprecated " \ + "and will be removed in 3.0, use JSON.unsafe_load or explicitly " \ + "pass `create_additions: true`" + end + end + end + + class << self + def deprecation_warning(message, uplevel = 3) # :nodoc: + gem_root = File.expand_path("../../../", __FILE__) + "/" + caller_locations(uplevel, 10).each do |frame| + if frame.path.nil? || frame.path.start_with?(gem_root) || frame.path.end_with?("/truffle/cext_ruby.rb", ".c") + uplevel += 1 + else + break + end + end + + if RUBY_VERSION >= "3.0" + warn(message, uplevel: uplevel, category: :deprecated) + else + warn(message, uplevel: uplevel) + end + end + + # :call-seq: + # JSON[object] -> new_array or new_string + # + # If +object+ is a \String, + # calls JSON.parse with +object+ and +opts+ (see method #parse): + # json = '[0, 1, null]' + # JSON[json]# => [0, 1, nil] + # + # Otherwise, calls JSON.generate with +object+ and +opts+ (see method #generate): + # ruby = [0, 1, nil] + # JSON[ruby] # => '[0,1,null]' + def [](object, opts = nil) + if object.is_a?(String) + return JSON.parse(object, opts) + elsif object.respond_to?(:to_str) + str = object.to_str + if str.is_a?(String) + return JSON.parse(str, opts) + end + end + + JSON.generate(object, opts) + end + + # Returns the JSON parser class that is used by JSON. + attr_reader :parser + + # Set the JSON parser class _parser_ to be used by JSON. + def parser=(parser) # :nodoc: + @parser = parser + remove_const :Parser if const_defined?(:Parser, false) + const_set :Parser, parser + end + + # Return the constant located at _path_. The format of _path_ has to be + # either ::A::B::C or A::B::C. In any case, A has to be located at the top + # level (absolute namespace path?). If there doesn't exist a constant at + # the given path, an ArgumentError is raised. + def deep_const_get(path) # :nodoc: + Object.const_get(path) + rescue NameError => e + raise ArgumentError, "can't get const #{path}: #{e}" + end + + # Set the module _generator_ to be used by JSON. 
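# ---- Editorial aside, not part of the gem or of this diff: a hedged sketch
# of the parse options that the ParserOptions procs above implement.
# `object_class` swaps the container used for JSON objects, and `array_class`
# does the same for JSON arrays:
#
#   require 'json'
#   require 'ostruct'
#   JSON.parse('{"a": [1, 2]}', object_class: OpenStruct)
#   # => #<OpenStruct a=[1, 2]>
#
#   require 'set'
#   JSON.parse('[1, 2, 2]', array_class: Set)   # => #<Set: {1, 2}>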
+ def generator=(generator) # :nodoc: + old, $VERBOSE = $VERBOSE, nil + @generator = generator + generator_methods = generator::GeneratorMethods + for const in generator_methods.constants + klass = const_get(const) + modul = generator_methods.const_get(const) + klass.class_eval do + instance_methods(false).each do |m| + m.to_s == 'to_json' and remove_method m + end + include modul + end + end + self.state = generator::State + const_set :State, state + ensure + $VERBOSE = old + end + + # Returns the JSON generator module that is used by JSON. + attr_reader :generator + + # Sets or Returns the JSON generator state class that is used by JSON. + attr_accessor :state + + private + + def deprecated_singleton_attr_accessor(*attrs) + args = RUBY_VERSION >= "3.0" ? ", category: :deprecated" : "" + attrs.each do |attr| + singleton_class.class_eval <<~RUBY + def #{attr} + warn "JSON.#{attr} is deprecated and will be removed in json 3.0.0", uplevel: 1 #{args} + @#{attr} + end + + def #{attr}=(val) + warn "JSON.#{attr}= is deprecated and will be removed in json 3.0.0", uplevel: 1 #{args} + @#{attr} = val + end + + def _#{attr} + @#{attr} + end + RUBY + end + end + end + + # Sets create identifier, which is used to decide if the _json_create_ + # hook of a class should be called; initial value is +json_class+: + # JSON.create_id # => 'json_class' + def self.create_id=(new_value) + Thread.current[:"JSON.create_id"] = new_value.dup.freeze + end + + # Returns the current create identifier. + # See also JSON.create_id=. + def self.create_id + Thread.current[:"JSON.create_id"] || 'json_class' + end + + NaN = Float::NAN + + Infinity = Float::INFINITY + + MinusInfinity = -Infinity + + # The base exception for JSON errors. + class JSONError < StandardError; end + + # This exception is raised if a parser error occurs. + class ParserError < JSONError + attr_reader :line, :column + end + + # This exception is raised if the nesting of parsed data structures is too + # deep. + class NestingError < ParserError; end + + # This exception is raised if a generator or unparser error occurs. + class GeneratorError < JSONError + attr_reader :invalid_object + + def initialize(message, invalid_object = nil) + super(message) + @invalid_object = invalid_object + end + + def detailed_message(...) + # Exception#detailed_message doesn't exist until Ruby 3.2 + super_message = defined?(super) ? super : message + + if @invalid_object.nil? + super_message + else + "#{super_message}\nInvalid object: #{@invalid_object.inspect}" + end + end + end + + # Fragment of JSON document that is to be included as is: + # fragment = JSON::Fragment.new("[1, 2, 3]") + # JSON.generate({ count: 3, items: fragments }) + # + # This allows to easily assemble multiple JSON fragments that have + # been persisted somewhere without having to parse them nor resorting + # to string interpolation. + # + # Note: no validation is performed on the provided string. It is the + # responsibility of the caller to ensure the string contains valid JSON. + Fragment = Struct.new(:json) do + def initialize(json) + unless string = String.try_convert(json) + raise TypeError, " no implicit conversion of #{json.class} into String" + end + + super(string) + end + + def to_json(state = nil, *) + json + end + end + + module_function + + # :call-seq: + # JSON.parse(source, opts) -> object + # + # Returns the Ruby objects created by parsing the given +source+. + # + # Argument +source+ contains the \String to be parsed. 
+ # + # Argument +opts+, if given, contains a \Hash of options for the parsing. + # See {Parsing Options}[#module-JSON-label-Parsing+Options]. + # + # --- + # + # When +source+ is a \JSON array, returns a Ruby \Array: + # source = '["foo", 1.0, true, false, null]' + # ruby = JSON.parse(source) + # ruby # => ["foo", 1.0, true, false, nil] + # ruby.class # => Array + # + # When +source+ is a \JSON object, returns a Ruby \Hash: + # source = '{"a": "foo", "b": 1.0, "c": true, "d": false, "e": null}' + # ruby = JSON.parse(source) + # ruby # => {"a"=>"foo", "b"=>1.0, "c"=>true, "d"=>false, "e"=>nil} + # ruby.class # => Hash + # + # For examples of parsing for all \JSON data types, see + # {Parsing \JSON}[#module-JSON-label-Parsing+JSON]. + # + # Parses nested JSON objects: + # source = <<~JSON + # { + # "name": "Dave", + # "age" :40, + # "hats": [ + # "Cattleman's", + # "Panama", + # "Tophat" + # ] + # } + # JSON + # ruby = JSON.parse(source) + # ruby # => {"name"=>"Dave", "age"=>40, "hats"=>["Cattleman's", "Panama", "Tophat"]} + # + # --- + # + # Raises an exception if +source+ is not valid JSON: + # # Raises JSON::ParserError (783: unexpected token at ''): + # JSON.parse('') + # + def parse(source, opts = nil) + opts = ParserOptions.prepare(opts) unless opts.nil? + Parser.parse(source, opts) + end + + PARSE_L_OPTIONS = { + max_nesting: false, + allow_nan: true, + }.freeze + private_constant :PARSE_L_OPTIONS + + # :call-seq: + # JSON.parse!(source, opts) -> object + # + # Calls + # parse(source, opts) + # with +source+ and possibly modified +opts+. + # + # Differences from JSON.parse: + # - Option +max_nesting+, if not provided, defaults to +false+, + # which disables checking for nesting depth. + # - Option +allow_nan+, if not provided, defaults to +true+. + def parse!(source, opts = nil) + if opts.nil? + parse(source, PARSE_L_OPTIONS) + else + parse(source, PARSE_L_OPTIONS.merge(opts)) + end + end + + # :call-seq: + # JSON.load_file(path, opts={}) -> object + # + # Calls: + # parse(File.read(path), opts) + # + # See method #parse. + def load_file(filespec, opts = nil) + parse(File.read(filespec, encoding: Encoding::UTF_8), opts) + end + + # :call-seq: + # JSON.load_file!(path, opts = {}) + # + # Calls: + # JSON.parse!(File.read(path, opts)) + # + # See method #parse! + def load_file!(filespec, opts = nil) + parse!(File.read(filespec, encoding: Encoding::UTF_8), opts) + end + + # :call-seq: + # JSON.generate(obj, opts = nil) -> new_string + # + # Returns a \String containing the generated \JSON data. + # + # See also JSON.fast_generate, JSON.pretty_generate. + # + # Argument +obj+ is the Ruby object to be converted to \JSON. + # + # Argument +opts+, if given, contains a \Hash of options for the generation. + # See {Generating Options}[#module-JSON-label-Generating+Options]. + # + # --- + # + # When +obj+ is an \Array, returns a \String containing a \JSON array: + # obj = ["foo", 1.0, true, false, nil] + # json = JSON.generate(obj) + # json # => '["foo",1.0,true,false,null]' + # + # When +obj+ is a \Hash, returns a \String containing a \JSON object: + # obj = {foo: 0, bar: 's', baz: :bat} + # json = JSON.generate(obj) + # json # => '{"foo":0,"bar":"s","baz":"bat"}' + # + # For examples of generating from other Ruby objects, see + # {Generating \JSON from Other Objects}[#module-JSON-label-Generating+JSON+from+Other+Objects]. + # + # --- + # + # Raises an exception if any formatting option is not a \String. 
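# ---- Editorial aside (illustrative only, not part of this diff): a small
# sketch of how JSON.parse! differs from JSON.parse via the PARSE_L_OPTIONS
# defaults above (max_nesting disabled, allow_nan enabled):
#
#   JSON.parse('[NaN]')    # raises JSON::ParserError
#   JSON.parse!('[NaN]')   # => [NaN]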
+ # + # Raises an exception if +obj+ contains circular references: + # a = []; b = []; a.push(b); b.push(a) + # # Raises JSON::NestingError (nesting of 100 is too deep): + # JSON.generate(a) + # + def generate(obj, opts = nil) + if State === opts + opts.generate(obj) + else + State.generate(obj, opts, nil) + end + end + + # :call-seq: + # JSON.fast_generate(obj, opts) -> new_string + # + # Arguments +obj+ and +opts+ here are the same as + # arguments +obj+ and +opts+ in JSON.generate. + # + # By default, generates \JSON data without checking + # for circular references in +obj+ (option +max_nesting+ set to +false+, disabled). + # + # Raises an exception if +obj+ contains circular references: + # a = []; b = []; a.push(b); b.push(a) + # # Raises SystemStackError (stack level too deep): + # JSON.fast_generate(a) + def fast_generate(obj, opts = nil) + if RUBY_VERSION >= "3.0" + warn "JSON.fast_generate is deprecated and will be removed in json 3.0.0, just use JSON.generate", uplevel: 1, category: :deprecated + else + warn "JSON.fast_generate is deprecated and will be removed in json 3.0.0, just use JSON.generate", uplevel: 1 + end + generate(obj, opts) + end + + PRETTY_GENERATE_OPTIONS = { + indent: ' ', + space: ' ', + object_nl: "\n", + array_nl: "\n", + }.freeze + private_constant :PRETTY_GENERATE_OPTIONS + + # :call-seq: + # JSON.pretty_generate(obj, opts = nil) -> new_string + # + # Arguments +obj+ and +opts+ here are the same as + # arguments +obj+ and +opts+ in JSON.generate. + # + # Default options are: + # { + # indent: ' ', # Two spaces + # space: ' ', # One space + # array_nl: "\n", # Newline + # object_nl: "\n" # Newline + # } + # + # Example: + # obj = {foo: [:bar, :baz], bat: {bam: 0, bad: 1}} + # json = JSON.pretty_generate(obj) + # puts json + # Output: + # { + # "foo": [ + # "bar", + # "baz" + # ], + # "bat": { + # "bam": 0, + # "bad": 1 + # } + # } + # + def pretty_generate(obj, opts = nil) + return opts.generate(obj) if State === opts + + options = PRETTY_GENERATE_OPTIONS + + if opts + unless opts.is_a?(Hash) + if opts.respond_to? :to_hash + opts = opts.to_hash + elsif opts.respond_to? :to_h + opts = opts.to_h + else + raise TypeError, "can't convert #{opts.class} into Hash" + end + end + options = options.merge(opts) + end + + State.generate(obj, options, nil) + end + + # Sets or returns default options for the JSON.unsafe_load method. + # Initially: + # opts = JSON.load_default_options + # opts # => {:max_nesting=>false, :allow_nan=>true, :allow_blank=>true, :create_additions=>true} + deprecated_singleton_attr_accessor :unsafe_load_default_options + + @unsafe_load_default_options = { + :max_nesting => false, + :allow_nan => true, + :allow_blank => true, + :create_additions => true, + } + + # Sets or returns default options for the JSON.load method. + # Initially: + # opts = JSON.load_default_options + # opts # => {:max_nesting=>false, :allow_nan=>true, :allow_blank=>true, :create_additions=>true} + deprecated_singleton_attr_accessor :load_default_options + + @load_default_options = { + :allow_nan => true, + :allow_blank => true, + :create_additions => nil, + } + # :call-seq: + # JSON.unsafe_load(source, proc = nil, options = {}) -> object + # + # Returns the Ruby objects created by parsing the given +source+. + # + # BEWARE: This method is meant to serialise data from trusted user input, + # like from your own database server or clients under your control, it could + # be dangerous to allow untrusted users to pass JSON sources into it. 
+ # + # - Argument +source+ must be, or be convertible to, a \String: + # - If +source+ responds to instance method +to_str+, + # source.to_str becomes the source. + # - If +source+ responds to instance method +to_io+, + # source.to_io.read becomes the source. + # - If +source+ responds to instance method +read+, + # source.read becomes the source. + # - If both of the following are true, source becomes the \String 'null': + # - Option +allow_blank+ specifies a truthy value. + # - The source, as defined above, is +nil+ or the empty \String ''. + # - Otherwise, +source+ remains the source. + # - Argument +proc+, if given, must be a \Proc that accepts one argument. + # It will be called recursively with each result (depth-first order). + # See details below. + # - Argument +opts+, if given, contains a \Hash of options for the parsing. + # See {Parsing Options}[#module-JSON-label-Parsing+Options]. + # The default options can be changed via method JSON.unsafe_load_default_options=. + # + # --- + # + # When no +proc+ is given, modifies +source+ as above and returns the result of + # parse(source, opts); see #parse. + # + # Source for following examples: + # source = <<~JSON + # { + # "name": "Dave", + # "age" :40, + # "hats": [ + # "Cattleman's", + # "Panama", + # "Tophat" + # ] + # } + # JSON + # + # Load a \String: + # ruby = JSON.unsafe_load(source) + # ruby # => {"name"=>"Dave", "age"=>40, "hats"=>["Cattleman's", "Panama", "Tophat"]} + # + # Load an \IO object: + # require 'stringio' + # object = JSON.unsafe_load(StringIO.new(source)) + # object # => {"name"=>"Dave", "age"=>40, "hats"=>["Cattleman's", "Panama", "Tophat"]} + # + # Load a \File object: + # path = 't.json' + # File.write(path, source) + # File.open(path) do |file| + # JSON.unsafe_load(file) + # end # => {"name"=>"Dave", "age"=>40, "hats"=>["Cattleman's", "Panama", "Tophat"]} + # + # --- + # + # When +proc+ is given: + # - Modifies +source+ as above. + # - Gets the +result+ from calling parse(source, opts). + # - Recursively calls proc(result). + # - Returns the final result. + # + # Example: + # require 'json' + # + # # Some classes for the example. + # class Base + # def initialize(attributes) + # @attributes = attributes + # end + # end + # class User < Base; end + # class Account < Base; end + # class Admin < Base; end + # # The JSON source. + # json = <<-EOF + # { + # "users": [ + # {"type": "User", "username": "jane", "email": "jane@example.com"}, + # {"type": "User", "username": "john", "email": "john@example.com"} + # ], + # "accounts": [ + # {"account": {"type": "Account", "paid": true, "account_id": "1234"}}, + # {"account": {"type": "Account", "paid": false, "account_id": "1235"}} + # ], + # "admins": {"type": "Admin", "password": "0wn3d"} + # } + # EOF + # # Deserializer method. + # def deserialize_obj(obj, safe_types = %w(User Account Admin)) + # type = obj.is_a?(Hash) && obj["type"] + # safe_types.include?(type) ? Object.const_get(type).new(obj) : obj + # end + # # Call to JSON.unsafe_load + # ruby = JSON.unsafe_load(json, proc {|obj| + # case obj + # when Hash + # obj.each {|k, v| obj[k] = deserialize_obj v } + # when Array + # obj.map! 
{|v| deserialize_obj v } + # end + # }) + # pp ruby + # Output: + # {"users"=> + # [#"User", "username"=>"jane", "email"=>"jane@example.com"}>, + # #"User", "username"=>"john", "email"=>"john@example.com"}>], + # "accounts"=> + # [{"account"=> + # #"Account", "paid"=>true, "account_id"=>"1234"}>}, + # {"account"=> + # #"Account", "paid"=>false, "account_id"=>"1235"}>}], + # "admins"=> + # #"Admin", "password"=>"0wn3d"}>} + # + def unsafe_load(source, proc = nil, options = nil) + opts = if options.nil? + _unsafe_load_default_options + else + _unsafe_load_default_options.merge(options) + end + + unless source.is_a?(String) + if source.respond_to? :to_str + source = source.to_str + elsif source.respond_to? :to_io + source = source.to_io.read + elsif source.respond_to?(:read) + source = source.read + end + end + + if opts[:allow_blank] && (source.nil? || source.empty?) + source = 'null' + end + result = parse(source, opts) + recurse_proc(result, &proc) if proc + result + end + + # :call-seq: + # JSON.load(source, proc = nil, options = {}) -> object + # + # Returns the Ruby objects created by parsing the given +source+. + # + # BEWARE: This method is meant to serialise data from trusted user input, + # like from your own database server or clients under your control, it could + # be dangerous to allow untrusted users to pass JSON sources into it. + # If you must use it, use JSON.unsafe_load instead to make it clear. + # + # Since JSON version 2.8.0, `load` emits a deprecation warning when a + # non native type is deserialized, without `create_additions` being explicitly + # enabled, and in JSON version 3.0, `load` will have `create_additions` disabled + # by default. + # + # - Argument +source+ must be, or be convertible to, a \String: + # - If +source+ responds to instance method +to_str+, + # source.to_str becomes the source. + # - If +source+ responds to instance method +to_io+, + # source.to_io.read becomes the source. + # - If +source+ responds to instance method +read+, + # source.read becomes the source. + # - If both of the following are true, source becomes the \String 'null': + # - Option +allow_blank+ specifies a truthy value. + # - The source, as defined above, is +nil+ or the empty \String ''. + # - Otherwise, +source+ remains the source. + # - Argument +proc+, if given, must be a \Proc that accepts one argument. + # It will be called recursively with each result (depth-first order). + # See details below. + # - Argument +opts+, if given, contains a \Hash of options for the parsing. + # See {Parsing Options}[#module-JSON-label-Parsing+Options]. + # The default options can be changed via method JSON.load_default_options=. + # + # --- + # + # When no +proc+ is given, modifies +source+ as above and returns the result of + # parse(source, opts); see #parse. 
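# ---- Editorial aside (illustrative only, not part of this diff): unlike
# JSON.parse, the load family defaults to allow_blank, so blank input is
# mapped to the JSON 'null' before parsing (see the implementation above):
#
#   JSON.parse('')   # raises JSON::ParserError
#   JSON.load('')    # => nil
#   JSON.load(nil)   # => nil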
+ # + # Source for following examples: + # source = <<~JSON + # { + # "name": "Dave", + # "age" :40, + # "hats": [ + # "Cattleman's", + # "Panama", + # "Tophat" + # ] + # } + # JSON + # + # Load a \String: + # ruby = JSON.load(source) + # ruby # => {"name"=>"Dave", "age"=>40, "hats"=>["Cattleman's", "Panama", "Tophat"]} + # + # Load an \IO object: + # require 'stringio' + # object = JSON.load(StringIO.new(source)) + # object # => {"name"=>"Dave", "age"=>40, "hats"=>["Cattleman's", "Panama", "Tophat"]} + # + # Load a \File object: + # path = 't.json' + # File.write(path, source) + # File.open(path) do |file| + # JSON.load(file) + # end # => {"name"=>"Dave", "age"=>40, "hats"=>["Cattleman's", "Panama", "Tophat"]} + # + # --- + # + # When +proc+ is given: + # - Modifies +source+ as above. + # - Gets the +result+ from calling parse(source, opts). + # - Recursively calls proc(result). + # - Returns the final result. + # + # Example: + # require 'json' + # + # # Some classes for the example. + # class Base + # def initialize(attributes) + # @attributes = attributes + # end + # end + # class User < Base; end + # class Account < Base; end + # class Admin < Base; end + # # The JSON source. + # json = <<-EOF + # { + # "users": [ + # {"type": "User", "username": "jane", "email": "jane@example.com"}, + # {"type": "User", "username": "john", "email": "john@example.com"} + # ], + # "accounts": [ + # {"account": {"type": "Account", "paid": true, "account_id": "1234"}}, + # {"account": {"type": "Account", "paid": false, "account_id": "1235"}} + # ], + # "admins": {"type": "Admin", "password": "0wn3d"} + # } + # EOF + # # Deserializer method. + # def deserialize_obj(obj, safe_types = %w(User Account Admin)) + # type = obj.is_a?(Hash) && obj["type"] + # safe_types.include?(type) ? Object.const_get(type).new(obj) : obj + # end + # # Call to JSON.load + # ruby = JSON.load(json, proc {|obj| + # case obj + # when Hash + # obj.each {|k, v| obj[k] = deserialize_obj v } + # when Array + # obj.map! {|v| deserialize_obj v } + # end + # }) + # pp ruby + # Output: + # {"users"=> + # [#"User", "username"=>"jane", "email"=>"jane@example.com"}>, + # #"User", "username"=>"john", "email"=>"john@example.com"}>], + # "accounts"=> + # [{"account"=> + # #"Account", "paid"=>true, "account_id"=>"1234"}>}, + # {"account"=> + # #"Account", "paid"=>false, "account_id"=>"1235"}>}], + # "admins"=> + # #"Admin", "password"=>"0wn3d"}>} + # + def load(source, proc = nil, options = nil) + opts = if options.nil? + _load_default_options + else + _load_default_options.merge(options) + end + + unless source.is_a?(String) + if source.respond_to? :to_str + source = source.to_str + elsif source.respond_to? :to_io + source = source.to_io.read + elsif source.respond_to?(:read) + source = source.read + end + end + + if opts[:allow_blank] && (source.nil? || source.empty?) + source = 'null' + end + + if proc + opts = opts.dup + opts[:on_load] = proc.to_proc + end + + parse(source, opts) + end + + # Sets or returns the default options for the JSON.dump method. + # Initially: + # opts = JSON.dump_default_options + # opts # => {:max_nesting=>false, :allow_nan=>true} + deprecated_singleton_attr_accessor :dump_default_options + @dump_default_options = { + :max_nesting => false, + :allow_nan => true, + } + + # :call-seq: + # JSON.dump(obj, io = nil, limit = nil) + # + # Dumps +obj+ as a \JSON string, i.e. calls generate on the object and returns the result. + # + # The default options can be changed via method JSON.dump_default_options. 
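# ---- Editorial aside, not part of the gem or of this diff: a hedged sketch
# of the +limit+ argument. It is forwarded as max_nesting, and the resulting
# NestingError is re-raised as ArgumentError by the rescue clause below:
#
#   a = []; a << a      # self-referential array
#   JSON.dump(a, 10)    # raises ArgumentError, "exceed depth limit"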
+ # + # - Argument +io+, if given, should respond to method +write+; + # the \JSON \String is written to +io+, and +io+ is returned. + # If +io+ is not given, the \JSON \String is returned. + # - Argument +limit+, if given, is passed to JSON.generate as option +max_nesting+. + # + # --- + # + # When argument +io+ is not given, returns the \JSON \String generated from +obj+: + # obj = {foo: [0, 1], bar: {baz: 2, bat: 3}, bam: :bad} + # json = JSON.dump(obj) + # json # => "{\"foo\":[0,1],\"bar\":{\"baz\":2,\"bat\":3},\"bam\":\"bad\"}" + # + # When argument +io+ is given, writes the \JSON \String to +io+ and returns +io+: + # path = 't.json' + # File.open(path, 'w') do |file| + # JSON.dump(obj, file) + # end # => # + # puts File.read(path) + # Output: + # {"foo":[0,1],"bar":{"baz":2,"bat":3},"bam":"bad"} + def dump(obj, anIO = nil, limit = nil, kwargs = nil) + if kwargs.nil? + if limit.nil? + if anIO.is_a?(Hash) + kwargs = anIO + anIO = nil + end + elsif limit.is_a?(Hash) + kwargs = limit + limit = nil + end + end + + unless anIO.nil? + if anIO.respond_to?(:to_io) + anIO = anIO.to_io + elsif limit.nil? && !anIO.respond_to?(:write) + anIO, limit = nil, anIO + end + end + + opts = JSON._dump_default_options + opts = opts.merge(:max_nesting => limit) if limit + opts = opts.merge(kwargs) if kwargs + + begin + State.generate(obj, opts, anIO) + rescue JSON::NestingError + raise ArgumentError, "exceed depth limit" + end + end + + # :stopdoc: + # All these were meant to be deprecated circa 2009, but were just set as undocumented + # so usage still exist in the wild. + def unparse(...) + if RUBY_VERSION >= "3.0" + warn "JSON.unparse is deprecated and will be removed in json 3.0.0, just use JSON.generate", uplevel: 1, category: :deprecated + else + warn "JSON.unparse is deprecated and will be removed in json 3.0.0, just use JSON.generate", uplevel: 1 + end + generate(...) + end + module_function :unparse + + def fast_unparse(...) + if RUBY_VERSION >= "3.0" + warn "JSON.fast_unparse is deprecated and will be removed in json 3.0.0, just use JSON.generate", uplevel: 1, category: :deprecated + else + warn "JSON.fast_unparse is deprecated and will be removed in json 3.0.0, just use JSON.generate", uplevel: 1 + end + generate(...) + end + module_function :fast_unparse + + def pretty_unparse(...) + if RUBY_VERSION >= "3.0" + warn "JSON.pretty_unparse is deprecated and will be removed in json 3.0.0, just use JSON.pretty_generate", uplevel: 1, category: :deprecated + else + warn "JSON.pretty_unparse is deprecated and will be removed in json 3.0.0, just use JSON.pretty_generate", uplevel: 1 + end + pretty_generate(...) + end + module_function :fast_unparse + + def restore(...) + if RUBY_VERSION >= "3.0" + warn "JSON.restore is deprecated and will be removed in json 3.0.0, just use JSON.load", uplevel: 1, category: :deprecated + else + warn "JSON.restore is deprecated and will be removed in json 3.0.0, just use JSON.load", uplevel: 1 + end + load(...) 
+ end + module_function :restore + + class << self + private + + def const_missing(const_name) + case const_name + when :PRETTY_STATE_PROTOTYPE + if RUBY_VERSION >= "3.0" + warn "JSON::PRETTY_STATE_PROTOTYPE is deprecated and will be removed in json 3.0.0, just use JSON.pretty_generate", uplevel: 1, category: :deprecated + else + warn "JSON::PRETTY_STATE_PROTOTYPE is deprecated and will be removed in json 3.0.0, just use JSON.pretty_generate", uplevel: 1 + end + state.new(PRETTY_GENERATE_OPTIONS) + else + super + end + end + end + # :startdoc: + + # JSON::Coder holds a parser and generator configuration. + # + # module MyApp + # JSONC_CODER = JSON::Coder.new( + # allow_trailing_comma: true + # ) + # end + # + # MyApp::JSONC_CODER.load(document) + # + class Coder + # :call-seq: + # JSON.new(options = nil, &block) + # + # Argument +options+, if given, contains a \Hash of options for both parsing and generating. + # See {Parsing Options}[#module-JSON-label-Parsing+Options], and {Generating Options}[#module-JSON-label-Generating+Options]. + # + # For generation, the strict: true option is always set. When a Ruby object with no native \JSON counterpart is + # encoutered, the block provided to the initialize method is invoked, and must return a Ruby object that has a native + # \JSON counterpart: + # + # module MyApp + # API_JSON_CODER = JSON::Coder.new do |object| + # case object + # when Time + # object.iso8601(3) + # else + # object # Unknown type, will raise + # end + # end + # end + # + # puts MyApp::API_JSON_CODER.dump(Time.now.utc) # => "2025-01-21T08:41:44.286Z" + # + def initialize(options = nil, &as_json) + if options.nil? + options = { strict: true } + else + options = options.dup + options[:strict] = true + end + options[:as_json] = as_json if as_json + + @state = State.new(options).freeze + @parser_config = Ext::Parser::Config.new(ParserOptions.prepare(options)) + end + + # call-seq: + # dump(object) -> String + # dump(object, io) -> io + # + # Serialize the given object into a \JSON document. + def dump(object, io = nil) + @state.generate_new(object, io) + end + alias_method :generate, :dump + + # call-seq: + # load(string) -> Object + # + # Parse the given \JSON document and return an equivalent Ruby object. + def load(source) + @parser_config.parse(source) + end + alias_method :parse, :load + + # call-seq: + # load(path) -> Object + # + # Parse the given \JSON document and return an equivalent Ruby object. + def load_file(path) + load(File.read(path, encoding: Encoding::UTF_8)) + end + end +end + +module ::Kernel + private + + # Outputs _objs_ to STDOUT as JSON strings in the shortest form, that is in + # one line. + def j(*objs) + if RUBY_VERSION >= "3.0" + warn "Kernel#j is deprecated and will be removed in json 3.0.0", uplevel: 1, category: :deprecated + else + warn "Kernel#j is deprecated and will be removed in json 3.0.0", uplevel: 1 + end + + objs.each do |obj| + puts JSON.generate(obj, :allow_nan => true, :max_nesting => false) + end + nil + end + + # Outputs _objs_ to STDOUT as JSON strings in a pretty format, with + # indentation and over many lines. 
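# ---- Editorial aside (illustrative only, not part of this diff): a hedged
# sketch of JSON::Coder defined above. Generation is strict, so the block
# decides how non-native objects are represented; parsing goes through the
# same configuration:
#
#   coder = JSON::Coder.new { |obj| obj.to_s }
#   coder.dump(1..3)          # => "\"1..3\"" (the block turned the Range into a String)
#   coder.load('[1, 2, 3]')   # => [1, 2, 3]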
+ def jj(*objs) + if RUBY_VERSION >= "3.0" + warn "Kernel#jj is deprecated and will be removed in json 3.0.0", uplevel: 1, category: :deprecated + else + warn "Kernel#jj is deprecated and will be removed in json 3.0.0", uplevel: 1 + end + + objs.each do |obj| + puts JSON.pretty_generate(obj, :allow_nan => true, :max_nesting => false) + end + nil + end + + # If _object_ is string-like, parse the string and return the parsed result as + # a Ruby data structure. Otherwise, generate a JSON text from the Ruby data + # structure object and return it. + # + # The _opts_ argument is passed through to generate/parse respectively. See + # generate and parse for their documentation. + def JSON(object, opts = nil) + JSON[object, opts] + end +end diff --git a/vendor/bundle/ruby/3.2.0/gems/json-2.13.2/lib/json/ext.rb b/vendor/bundle/ruby/3.2.0/gems/json-2.13.2/lib/json/ext.rb new file mode 100644 index 000000000..5bacc5e37 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/json-2.13.2/lib/json/ext.rb @@ -0,0 +1,45 @@ +# frozen_string_literal: true + +require 'json/common' + +module JSON + # This module holds all the modules/classes that implement JSON's + # functionality as C extensions. + module Ext + class Parser + class << self + def parse(...) + new(...).parse + end + alias_method :parse, :parse # Allow redefinition by extensions + end + + def initialize(source, opts = nil) + @source = source + @config = Config.new(opts) + end + + def source + @source.dup + end + + def parse + @config.parse(@source) + end + end + + require 'json/ext/parser' + Ext::Parser::Config = Ext::ParserConfig + JSON.parser = Ext::Parser + + if RUBY_ENGINE == 'truffleruby' + require 'json/truffle_ruby/generator' + JSON.generator = JSON::TruffleRuby::Generator + else + require 'json/ext/generator' + JSON.generator = Generator + end + end + + JSON_LOADED = true unless defined?(JSON::JSON_LOADED) +end diff --git a/vendor/bundle/ruby/3.2.0/gems/json-2.13.2/lib/json/ext/generator.so b/vendor/bundle/ruby/3.2.0/gems/json-2.13.2/lib/json/ext/generator.so new file mode 100755 index 0000000000000000000000000000000000000000..595371e35fc718f7457cebc811af7fac37f59ff1 GIT binary patch literal 220496 zcmeFadt6ji_dk9H7{wINSejIpQ)x+g0TqRkWEAv_j<=NZDGd<@B@qZ_ykEwKF;3&C zPgz-6Sy|bW-Obd@02R>8NKLIQ&Agl=rDi>dmz?i=?R{qEFiB6p&*$^|TDsp~<8s2o-deQ>!a#nbkS<^zyXfBWF7b@vVH*Xn8KhJPLFeX!fpbvJbdnN`ymgd~^iALT-n zJ%oSB@kyKpUNQIu39r5a<>MysZ$Ysu@vnj6ZLT1v0k|vi-`Irw2O;N5{I4~EpWj5j zLzr_ckGaWE1*ahC^J*4xd5rmGFm~(C3RL@V_)6 z|LrE^uWSPUToZCWZUVm>cDMzy!k-_2xKg>OjhMrk0k>;H{-q}5Ki!0#yEh@He-rqb z$oooqQe!mZ7yg`RBHyh|$R7j&w}cA;9MwcUZfe4>+BCu6v59=AG{HZ;34SNab*1+H z3v|7beI9E&+(k z-vR%X{5H9Xd`CdemH0DI&@1`*!%g_ZV939coIje-|CuKA@idX|MrP;M!b)Pm6L%F4 zh0M%JQws|+OB`9nj?7FU(>i8YW{$nsJ}IxnVJ{vtY+!z2fqhKY#C$uaU73=ZJtHeK zH?JToKW~;@m};MzT{KfDo|u{IEXdBv&!2{DkVE#A%Qxi*E#aVeJEW>FfMaAIDC8q4jc4+OGS!B=2$#P@~#mW>6Nj%I5h|H2#k~t->ASbhsD;{Cu^MI4N>Iz01~F=*33z2z1c&awxL#d4mQRa~6M&68bNFwG7(By($)hFCHXDvTL^ zss?3davvjqqI@7euV7NJf}3X(BA4#|HGZJ0=(A=yE}`o<>zWHJnfa=gGG_&<|>) z{Je?eaNT-!@7cYiusZ?)sHqh1L$Pcg8*H-<7?|0!dqQs^5Dmq8bq~dRc2A`K*3jr* zL?fs%EdWI}$@6zo2c#IVHV?(G6f-uWlcrHZZ!A6o>)2)gd>YF~qi~OgYnzuBLo0r( z20O0HYeB2i(HiW$p5O=}6EHo2{fEH1-*gAZpV9Uu0{E48b9_S>eu9PlBOd7|V!tg~JJ~j-0_#V!04#T@NexX*&hX+CcUiu|R(ApY9a`0KM!^054XQd7= z2C>*T(cy2@;osBYJL&N2b$D$Hh*8UR`0hIX8XZ1Chp*M)lXdt5I{Z)_{;&=|Oovx= z_z^n%86AG44nN@w4m3xADNgX2tixk!6?}?xcuZx2&kP-2b2UbD>F|8tgC{A|;VJLH zvs8x<_!iNY>+pdo6X93t@GS|UJ*##2Rskf!_jLFe9e%wI-&%(+*Wm+gkjOPUynen| ztHbN(od+rOX6nGRJzFh!`@QehNQA z_|7`~1Ref19e%P7-$jQn(&6 
zTwvEO#!dl1-y;}lgdD~1H#&LY*bjqo<~@SeU}_4Gdjw;FoCU}vo7^L?4{6Cg0;|SV z7PBi>-Xr{oDtV7!H8d=zO5P*b0%S8l?h#nA@QDiSJ%Ufc`yqu7LpZe-R^R}sSt}e1 zgt0FnM+%;3yOq{ivkk#tA0RbrWs(CKBT`K*pXx3!QD0`T?dH;c|XBbtvlFc?oP&p=UB6<&6ZH z$O$s(hp^`O6f6tCJ5Ovkz1&#fcU~~w@MZ&+iv=7PE1U6TEzIcgq_S>TdOWFW#*@mk zQo$TC%-;vh5k2E1^LcP&%hf=wjAdlTzgql(f@JJ61i#0aF&}#$HU};|#>L&&zHD$g zt%sma z9L}1@pf-vB2XW9ZYJ{{Eh{;kw(zQ@{&~L;A~w}VYqw6(GT0?BOq}y9L*jFhs#;I1DL3mluIR4C#E+U)n@C$UC@J`=z<6E^$uLAK=}uLQ`f_LL6K zL!ci3=(iJm^LD~k^LB!7-cDFJN57rmo3|78&X#Aib8ywKj~yO--Rk=eyp?CPw?OAJ z0C`6HH6Xi5@EPr|fqX%N&uFv7x@=-Hf+q#~$}z3q+2W@+fbKA#3{a6*O_&(d0Z3+C zkAL1YGSg_|O}%BB36H9`(+W|K?#jKhPkico?`#!WXe9a6_j;p#I;QJ8O}ae6TFpiO zeMncEeQumxjEA#k=rDwS4D#g?vJ#4z1*koC=4hnJ8lw^D`T%+Abb~Kn%jyogGa&l@ zVO&dg)yl9DNeV&r5D?RzDbwxj)*Fs?dl_ML;~$2VF= z`$hbU+dhcA3`pXW1f%_~{1#Yd2`U>PO?R|EfZcsC=z)N=C!lPsFW*4B0`zFW)V>%3 z&+(m=Q?=!(4KUTO_EQHMy59RRojDr9d~;`}FVErT)&mWQuEUh&8_tkQ&VrXS{mRqN zLL+(@St`moEhS*6PKH^2=G|oAYZsYJk%{bds2c<@t#qQ1QK}r%4m#KSRo{oatTQG+ z;#^8a`+cj_1|#ICdNj1gulmGr7#fAFqOIST=eq%?uXU=Nhmz}%l{2_1;v)f%8(n^Y z8CI3^RN6rD@~I$IR1Lq+1$HeK@IITijJze_a$DeM4xGJ+4mV`z_6LF%Ll>7L=iv+; zK^1#uhEhiZMvDd0&k{d#+z6QX1Z>V*__-eHR|A|^LEHu8PCyE^{AIM1V{FkpoFqTH zT*KoK*l0Dp0OWZl&)}P49*$KjZ2R|7| zNdtJL#wI@$#o42jualP|qm9Y9{S)gf-DGc?6u5a7-Mq%nY>U~|`4U-0P+1cY{uzYx zA%1#+>H^3luj`M)btI_CfY?$rF{{fBuq2q_;SBdH-8~ZLyTEn>_^$(mZwBFAWE_|V zl``$YF+Nsf(?jYaIyS=hYtu0yt~cfAc{A0U%#vSdDt0fC1snXMWxP>2yg z4lbFqsFF+Ozo6k4s^pTnq&^A{kR`KK5IIqST{8CvZ!dr>ndbqy2p~&lE1VPvXIkr6 zGQTzoGVE}-f&W&3ESarLN+4rI%9qR=!21vY5*%I@pRYMW0TwnQ!IJqoaLX%!e7kEm zh@B#?ydO-k5|_;09Y$e=<`P+EIuE~+Hv)l^+Cno6t?b5j)>K7~7>?n&xb%Jsx`5EId^2?|wm18;FfY-1}<%GXMWuodie3qJ6;F)EE zSx#rNpSp@#W&Di&4w=@5#R)K5gU(bxdmEWkpi{*-*!BpREf#2==4by#=3un_7}b1$ zRUNwmg&%1G5el$8BkG*%$2N|ECBIfKLq5EFo$3lRUU4lmG~ZM9W1{?7vMMHHt0R|2_$ z1pU7a$gKcx)!+MnPJ;i<=zk5EUl;gqv8vYMd48-3hu2PE+W@@}(NGHzTY+pLQ37HQ zkX-=r-|FxJz1ddVNyh&V!TUai>HjxCz9K>Y!wum(;jzE>e^i41O`*2>=hLx89QeO9 z3woybso`{Z<%IB)17&f5^CK9GfD{0t`@buf^3-jVD)RlOQBt{;p2ugotk#I+<4Hex zTsxD@a$7xzkN4Rpw!N39=|Vn()v{f6ss?T1_->q^>Tu9Vme2bR^XqeEsGltz=J(^0 zP+#&6^M`RQn9&_Fn2_X}Pxajib1nQw=;4~u>1KJ^!Y`qcGfdXJPvYmp2Z8PkXf{R-@a22zcYwN; zDg2v?fqo3AdJaEN0_wdCr{<1F=8Q>GRS&)J`~(>XnfVDam!1p#FXKwLy)XZi@iQ`x z0-}4?VZRQA(Z`Q0Tn9g=7ucPH$g0^GD|SF8bKGx`(i&7#fcqPWkwAt3ym#kl1^iMf zM*En1xoI6pj!ZH7IUP)aW;-3YD{ok!=L7G4!@~7H>dafYVnG;eGW6D~47o2@0B&V; z^?UsRrB}e}B7ok}Kq6}-jXN5K;d*4QrfYFoX<;strH-CQ$`?HKB2eB1p%djj{2VI3 z8*=Ldr!Qp8!%1`Bee#e?Tk)GLOVw5Ql*5C-DJ@?oF|c z32;AW4v_Z&(U0CJw;fiK;^=O>oapokFn1JLnM}#4hK>r}@IGvO&maoV)I9RO?qqxf zUZ)6IRWL|q0m4;4WHLLa5wM(g_(`FiQv+FxX#>o+T#*~UWuVI)s{GPgzw%&E( z?We$2J!DOUoe==1CWx1SJVnEC5Siq4n*%FujxigM(*cCr2S3>@u=qxbdnSksmOhKUA_Ka3_;A?tN;?E_>o$GsIP--7x~%hrAqcYOod zLdrIBQs1I%CuG%XjcX!+(-OoGApHTEWOMr~8jn_2B>y)9F0L1vIK-!U@@sU7= zkf8Cafh-|G<97kModk_<1@a7lN95xRG;-HZ7>5uYUVHEhg#GcEJ{9>RAvcUJ9}JhF z85KFb>ZYww52vGq{HiOzn!n16<`qTrYBeo#Us^1FjoQiz&ylblFt$SSOX+PR?6kdl&rs0nT<1UjjJ{2yX%5-DEWM!UrlC zN%`O-J{*TmNdtEUO_DDlB#(1a_gSUk&WJrglokOgAVH;VfwTg68!9UI=c(zc0CgUn zuXF{W81o>oe2(!tz_6f@BYC)!+6hH>?f~0R=o~^L*Mk@jWE_dBK+FX)6X2~eng&U6 z28CBHUJxz}v0zbbrN}2IRdp83GKN$c3pN7Hosn7tCD&0aBees_izHa6qd<-Tyhn@) z9kD?%nwYN�n>8mTzuQJ{`t$XY1nN?jfyR;KaUWp=dF`3z{WBERF>-2q1N%tGG=N z8;hme*z|H@eWw{U4?*eyt7aRJr>z>VM+Jwmoh?4f60SOfn9CYA;f~HLj{}^J{&*PQ4G6}W1+eCv;rmHwZBFXv zc-aa%djgygh*?0U1H51A>heze5sO^r=w0Oq#dz?u{sqAGXra>U&(-PrlPmngvku5z z=J0m`|1$Ew*?8osPj&p@vV$9a`m&>ee{%U{M+3ia`DI4~|4d#Zs0LRSV8n(Huc)+8 zZNFZ*P;I|YxlnC?D1~ZpYIKh2d47lC>2cc;!~R{ zNHkTDXsSS)QVqCDsR5z>flv*+zm4cK(6k7_vC_c3g}S) zxeWW$lk3JUpdSU)YYf#tqs_>LhVs){HNPU`J3xKjmqi+H%ksk}av@`Q>UW0`Ky=$Z 
znBgXY4~MHd(NnOz4VBL8$Qlfu{(zK&Al6~hU#k1Y2#cLa-}!-3kFYqZR|yp?WtL`{hRB{*TsH>==T5|(FeZ4 z=waCKw_0R#@mI9y{LX%4c|D*Q;Oqdw+k8DFjqc)ZmJS@bZu&(oK=ym;#jU z45T9oN)G}u03c($weM@`YeE?^;*LT7*%aUye*uu`Bsj(|0&*z;N9>%V=zy4yc{l-v z4km{a_WWL0N62SORzc(%>fjSM>w(-sf=}E$31kxqK5?@H$g3py#Lc@v_%O8WUs^l( z#*(p^Z0zvK>=v}cP3AAae;6S9n3mHIWQ<7pCiBnW{VxFFZf5kh$NGQ+C6-&dh^4?76N>nUE#e;V?*d2>EvFaAoKVCM z!TUY{MI8N4MI09t(PBXnW1jz`E#lWu^Cduv_$QFxNwA1ny)Xp#p{8rCcz@!1>{bE6wzuADT^f*@jtY~B0diOjQ}a4 z<@5rX6N>l(c%KKLh%5i8h^vDlS}Z7Hvgbd|358!tdL3%^0i=kZ0{NH(i}*8;|B_%4 zlX_zq21pUB0;vp;B3kVsWwFE}-bFhsVjb|;0!R@prx(baP{bDCZ3aLQ@A;b|dLK;I zBK83newlqBzmf-mLz8q~#r--wK!fk=@q1$Dw=}|)G@lwZ&&=4Kp0T^Za3_FXR3S0g z8Z->!kU1LQ^)#9)nzA=q<<6z`CZyJI;&S}hLAWV*E*C?^B>=f|xe>^NfK0NJ7CBLYjekAx)}`=a2)6~&3Lx>f z!byQJoRov{PoZ_je*pMT14#U>OiCbQM9SlTHhA?jnG*jIlQl<3S?q-PpAU&C0Ez!X zAoBq@JbRlxG5(Rwra}i}r7&w@a9K6Fi7~`2^J{dvT!@Xn*<^5W{%Cmy`Zn?hs`O1$ zlpJzB*0)13Zuon9Qc?Ofv#vMG`L{i}idqA|t^vq${%ub#rVfGL4Upyh+n!ugjqZnq z9zbrneBjAP9oB>9>jw2}r(^fg%c_!r0JAL>nXql_Yq)MfO!$1A&Y1|j0BkW7{X&RrG(dkbrm4Y z65{YSpQBl_vUL;0q=bHT+l`oYO>x}-1B)Eaf4o=|6y4bfwtJ!NZh(q>Wdv4$Zz8g> z{*Af({tORJLFVsER^UofC-vv%NaW}QP4iFaF4c09{+mhv80j}ly3Asc@I=keKc?Gg z_%X_QFPdR4cr>=kb(#^s(RrUJb#%0*LyG^#?7Zjsw_k?yYGgfy5s2xgkkb`vI|4G9;a-fC^Ff^j=(H4s8mr@jTO)GJTm-IV$k+ys=K!s41mUiH zU_lP>Wpx9!W#v|v+>_dy7j!#bHKvf(dhKhj{lAxisK-vJjSB)ZLDCXX`LMx)V#>U!S z!{jK71=coY54_x1>jQngXy9%b7!G6@30k`l$P9pY@OW!?{*ASxfi;T-)}GCN-dei? z`WDdutz8dfH3?dK7|4SF&oS*RDBBZ%V=colIm%*zwWqU7uYgWQax3(0p#fUk3uHG5 zTKgQxA%K^EytNH~W34o>X0gE9GugLTYrjC>Pc%Slv4I!>0Aej4$VmXN!|~QO{*ASM zfi;T-*0yA)u*tJ*jiIlU259Y6AY~+2wxK|V0K8GhTYK_vtW6E9SuC*jRQ7CZZ6frI zrvX}<17sEnTDum=N`QCq@zx%!Xssc8zzu;liv`v;XG^*6gs$7EoR%H~@+b*f+6iPk zz`OT&OLteagw2I5W~lfT1>klChT|!xjkSG5XgzeEVG>>SF8lf~p1%`gjtre2!Pa{; z&Y|-UAin@)=(Iw<`U*lAI%$Qb~P{cE?NsN)Q?`jZ9t#>?DNm;iA1gLnYQU4ZIzt4~md*)-QNlqxRmK>o`BacRhS?RrQpO>pT;WFH2I zOX1T|WdL!hGLRfV`WPdqU4q585yDY{#WmD+YbfR%LvkquYEi?dAkG0Y43J3{_fMp} z3hFjMX$eZO4XIBoMNT~s=M6!P0a~>L!Rf>84CD@^tOk^h2Jtrd+me3_hjE(}8d1wFealqs=~oPljPfE2VUkQ@?>ePbY{0B=r0?70Rvlh`SUTc`do zbEheD9UVwD49?`7jtp(Mr->6jsYk;X8^ax;x&v*oTMq(q8VMSn4CFk3*DoQ$35J`~ za1!*_nW!yxGlttQL&4LLu>jKZ0IhOC&|itbS%%$F=@~FJGL$0`xE_qF0piHLK-QC> zBhLfb3h>T0l4b_75uhV6#&)C04+U5-hz$QrO z(A?EPmXe^kyMWvY@WSWmhVf4e2Qimym>gxXz})Gf+zHE}l(Xw5D14k2XzpboFOs0S zkAQpt@N$ngcihTB%;g8>EEbp>63TtYnmY=G-_QchsWb6(KM9(v0-_SYYka)9$ZVu87#q1qxLL8-P3n@b(*NQyAHu%%a*9lYtMYQ0>)v|UjR7_@TyeQ8gqB8(2+Nn7oD1*6jLYawAT8h_{WrPQ)K)M75@da zDg(jMOlHD$hTTys=+J-l@R)HHuH^vYOaYLZB!d5tDN4Z>4cQU zf>!ZW?oK!*vFrs^J*b?A>Jhk$$v z@UA)D+Hqr}Si3o}X0gE9x4D&8K_^?~Ptf-x4bWO-IGQp*tepg;2EcpZcx!)mP4#$S z&0>MIuX8W6)=HtT0S(Yv8IX1)ShgWR1_Qj8j<wQ*1ThAwg8^r;F0{#@l-TL=BviAVQzi)tiMS}kQ z1>_Gv`q&EoDfFgE<~ovFa$PX*m}|+%5|hrSye9_fShdF7nTVMj>9bAQWaGN>utJ!7 z&dcFLJ?=)xq)x{RFi4TFzhqoNSGRfc_!T=6Ps)A>Yp7dHVtps!}K)n}S3>7zEzkmeJ!52BA#dwmkFIc96cQU{o24XRg%K;ugyUyc+Y+VoXX%D9blf2JO zp1mh0Pa&4S{P?}ezHnzyg{s=!QM7*@R=b(om&$NBYq(J?VSUV#FLB-k53U2$+Zn>- zflU4Rg(tie#!w#f0nW#KLMul~5KWfjb#E^^?W+X5>A`86_a?!MW;enx=ysA{1NI;E z;*B8XZiA)d5779KQlBZR?=TNB@s?Mjmlv!UDYVoFR69T#zwCUKzDaTu=&J$pcFD&J z=ns$ebml5A>(QHVKDZ7GWbnKV$YhSZU82xoenpDBT~Z37Ccry%iO8!H-Y!9ayp@|5 zAe^xEEq)eiMptd4tz};9#+zviSyw{QB0!8cQY>FMz&tIeDtB;VLn*_XDYt?DR)E}0 z;Y)9i0OV$hRS-QvL2xtWdGKzfFmE-y0pv9R#J`3!WfyC6PJl(M{ZrAn{{YJ;;64a& zla69(2IQ!S%SL{|LM#3{i+Qc*Ph5jDH}Xrz;!zoZ>_+ti(gPs7QC7%R-$4kwQMr`i zZqyX8Oa#bo)Kx&P0C>Gk&`;Qnl8yYbzy%NWZg-&WfZ%P^#2u)OKpvqczGKhh?>F+J zkD#>c7YYZ~(KVsYxz#JqSc~(2ko5voYz4?2Fw5r!5?T&hYL}ZkVDE$XU4T^DH$c9k zFmDf8;c$Ri;h@s~0Pk-UW~HT{gV7g&Lt5ANg@UV)d<)uH?dAMLC)v94z?KV$v33^* 
zd=^Wr-Px334L1jWQ-IWPPaquux|r6IuPo+i)6zbrZE_!V0YHj*Es&J}DW(;{KV=w4 ztZ-1w+rhhz!Yt-vKpq9)kk<91k#bZQ*2R6EzxfuN*}7f?+cwH^uk#Zi9|5FwSs_;q zVSCu)&!!Ap*B@Z{g)+QNRCOG7fdJm5e`;N_*BKA`hs7}6$YEPrBPeMIke1c~NPB?U zc4-EA0hZXHL*|n=c2s$UnR&@ zk&y9#>7@zjEfdnaC8RG-NFQ0A?meB5&eamu6so~C%wXfaRA&zu^@F?P^&NZti12uQ zt6o1MJYL_UuQ5S66U}mnF?7z;Q&5dj)5a<1La)Zw3}~)=A+tt!*=Y{SAG;0eUnuwNj(Ajc7Xh=Up>7v$iuR^5}+A9tMApMo?)Q-1LOlI4Lteg$|}%TGmR5=sh9RS z9DEM+CZ^HRM%vGBL4UzC`rAaiTMaAIEC8nGraCCyK$QW~lF9)L>Uei*8{m0;B__&ufLzb^8**YrZ#gY{$U6{NKG6po&)kMz?5`f zAYrkfq#eD?PAmXRx*HNZDHR=p7XEV}^ao4Y#mgOc7tRxqbr4MN0Wz86UWAm$dB_5k zUIroytCUxv)V&f!9i$cka;^j6-hrP^pvnN=AI5@R)5}v9!J0n$5N0@q5sgPk6>z}{zo$CCvW-EjTpCOcDAH=#2`P`L}2eBRn@(@5C#Igd> zK-6Bb@IkEAw8$>99sI8Vq>EUYq(Fv#@(8|X_zrmA2EZ-0)>}-ihE!9!W&gYNb8s92 zWHMFS=}(}36ggZ&KZ1y*nkWpQE*$QoOq>Mo0gBV6F+)1TNTVmDI>C3RxmTst-8i!|aKAVo zAf17mB<%sx8LW_}Mnedl;Ru@*JHuIEIfF9n46}gX;#%2= z$(1V}p=Ugi9T{L+I%n*sPUO+m61m%O@!LbM1rDglMpH)4+(!aw75OP4J%kovX5&by zi5U5-C8Uo`NN-%8?pMT-16K?Y|j zr>35!+OyH>>*tzYr9TiHy#)cH{7oj4qiSWGqL7(LU&vLCBL&_d#hPl>X%EPbBgUm1KVD3n}us zGR^#pE6a!)^6|u9U3m$pRz_X~e80MxEVuV+XLx`kD^tR)FZJ;*8{d9O_=jS6St|uM zY~khYv?Z&QV{U^E>5#3~%Ej>Mrf&v*pdGbg%+Mz+Qr-mr)y`Dz9KGk(oyA}eQe!Sg zncsA$cG7IIfV=cQEMLg{rdxA8QXfS-e+VGUjkk4qc7c8s(3J0t^57@l&{pK(Of}{r zc(dJIX!6e1EO6unt9rYePt`}D;!~@7r&gUZ8TVZP={FehhoCzB&P3voL}E)Ku_KY- z%cnf3K#j>j37>P1nzSC0gFvhqa=$6=bGo<jXk|_}EwKaovD^_UMfNFzSt8#C+V9?^0d}qvrwSyOfW+@?FXepzmTD zzliy`E8nI395i1%5~nxmW>#qmK0*LU8-;zi%{r|i=t4lfG*~Wpz-;PmcC*=`OpgK; zZ0gw@uT@VSFf*^Bo=-#bET32>e2E)9h9{(opxt@W^-p^c=j5;O<`ZlV2RMI%Z!VCT zfbcO8&Zqdf1=LNEUgKiZ# z_7$=|1-OMEvZo?i0JjuGOCbCNliL==a3DiT^Z>CC$XpVqgSZLEb%1tbK-|_-N6CBG zAo`=6=2DT-!WJG_6&JAh2G$@As|>S{Cc zyt!55D~lEBT=@ggXQ+}(o0RhraDY5--U3KNfIM$*#iA!Fu*;mY!8?q?hmYZCC8xJmX=F&QsIgf##FO$nM$I7GzGFpO7+I*G&Kj7U4fCQI0d^w9pD8Mj) z%JKj4_8st17F*xX?CkSoH=9k{4M~8IgkA$8B0@s2ppb}SB>@{!E%ZAd$X3OY&%TFt}&c6+?%yLWqSm|wQ%wf z>;Z=xddlg+4&qkrZUk9*2zCx&X2QusuouCzoPkAOVo?pAHE`jR4>#EUL$JQK^%`yy zI%6~g8=d}G){&|gI*;pfan#F{>}nr2DA|K|+LB$V+p}bUM_!xZq-0m>J+Jn_$TY0# zFpehaN?px^ zrDW@DLQt~QGg|VBbMxCVeqCWoHra;_O1APYTe20p9ZPlx^12C5O147xWXaw}_*KTS zWGi%kmMo(Ig9XG%$r^%^9mtYhsNJa`D<$g=xGr!~vcd2SWPl|b0}tO93tzswlAYso z>(^Q8fi0=CeO=_X*p#fjbR9R{Yq?ykyOYbMpuYgF!z92-C-E#okHBHg;WTH7=@6Xh zSYk(9mysZH)!ndv+~qk(%ML{gBo@QTP6gu3PQ_5Uk8DA@Ah{D-!ihD#5T0}4@(3o@^uTm$>XQiv1X~+s#F}Q3 zB(3SAps=1KIZ^x)o{!+fn)(DXb|v6jQ@$fo04LUT5Ih6m#G3lZnE`UXpF6DSv-bdp z)^r-+C&7s|^>MNS9E*~&re_0o37oa2Qvy->J}A0;0&99Hpf83KYkCtrHwZdTt?y(_ z&3;-RV2cJUu%un*FxJA^I<|l!qXw)8g$LpCm?9eRDnc*Ag;!XtKm!h3Y51WA>Iw5>yI!DD(4<%G)KGVAuG|ghX8XwoM_v}@Vp0? zM=;Sg|E+oHyL~d+@qOrH%VK~J`3Lg<2~I5K$)^6>o`(HLIC<0k6caZP;VQT; z?}Tu^_-}SNeX4GrHk?K%7*6{s;rlEL#-gRh(Otd*p+ayYo9z>HjD(cfUj&T(9Uwc! 
zep-}<_i;-Oe~4v$eoZRC{RqaS37!mA$HR@G{SAd*v#>$4+sW{Tziz^%dBohey8|$O zI?JZ>e|s}l+5ne~Dp3M9QLIzsTw&u6CXmlwR|LjcAhG8Bo2-m5i1Neke5px|>ZZ0J zBY%K^IHYh(6AH|%mrWIW(ENEcx_VRRch(p*t0JiL2OqSypJfq z?q|w78sQV+OzqmNl=CGm|GwHialNhG>42F6C$-DhrIs?l+Fb+B4RB$7l<*n+S%W`D z&w1C9HMMI4JJ;?@Qw_F1Be^#V<1-_Hp}R=nvWKh$igg=G;6>#BG@R&nvF8AABLfM%O$oHn?&ycC1WpCacsP;3nea3+KnYw6 z&nmd^bryRU{hntNL;{!Cz|ImlDdfKqP9%_J zbYKU<+Zjg*WEmZ34O`R4{}z%p$N{@ZfOmaF0>6dIr~@02|9x;G zf!{*ir~{uO{66C-f!{)M_KVB^+#nDMI7R}45k3e`Byd0=fu)qdA0c=AMk|5gfZ=9< zNT30p$qY~e%iuW&F1*rW@1g?}ZGz|kul@0}iw=x5@wOFI3`Add1+VRF#HuJSVyi1c z{f^5;;9sy$2n=INZnO-6D?^DVh~4@*d{rz)`jAjk&wtp*91`jym;yW`bO?bX=aZL1-?yXVJ(X8){A>Sa_DSaiQW=+W1iZ1;pyo27o7rnk@rX zHE-pVoaCrkp`v$z=d>>wkI=$U^F>H4Idu~Kt?0*RZ@vLc5(D}mQEtJjZ6lZ0OZjP7PSQ=ne&KGqD82ZBn)7Q z6<*=ZMab_DW%8>Jfd5bf32d?mrDvf{pndLL<-m-DI+GQA_H75w*QnlP2DS{n^2}$U ziLAsm>R1xHe;#^Zxa{vTv6rk8l)w&zKY{C#gk07=iDB9_=V_JG9-(&gvAzkH{aOSI z)GAJ;?~QOzxX4|g%?CR9QBH4wtEW}Mc^tq0K+<7=sfAN7AlIC|A@e=@VpEs=VAQ}v@?tu#Fcx+qxj`Dw{3vg3Mc1H->5Q-gq)KIXV1g_ zj^v-=G|nc510QvfAN7nXUkDXe0|3?uggU_KgAwQpPj9$_Is^(v^J_A`^7nf& zMbC3?R~YAh{Rsy}c3y^ZYGn87QgaV`pGznQwfHOS$ z6vIv!rV-ihOJMTokw+)L2wop3%SjSwWyx3q^&(IQd^u%v0qO(gI&BGbVU~9;37A(L zOD*FVrXf+ipyDr5?0f~F5xkrZC%dz^!gD=b*tPRV*_|aAXCzJw8krj`E#(X*t=DnT z@@z!E0JP`f@))ZxM$A7D;`t|i9Ri=jvmH*F`aJ=OV@To}<(>&+Bu%}z5lt6Pn))LF z+^K}KO}#Uc+rvpy9|+I>4Di6&k?V#Y zfu%=P?he*674dN7G!!n6DWZs@5IP=C6mc#*r@=+p27EGJ;6n!Tp?*C~l57Vr0)8`` zxeszupb5S&>LWpuMT_4B@H<4nNzj|1)u)Hh@>TB7&!D6aAl#PsN=kxO;QPAxbZ&{y z1E`St0%SgellI>}K)Dzc{r3Mml7EKtZbFlnc3-?0)eM)jmqjq`zB{v>7SQm0rGB<| z816d(vjd!Dd3peTOIntFk=z?jOwLj89Kirh&KP(`!R0J%lC>3GV82eXz9K*Gcdh8A z19uvnWIi!~e<&^UGm+c~Cz)RZ&s7XC^Sj`=11@K_MKtAh3Xh8bVx{W*1hUf{y_s?C zN;OCZrSCKOB4uyCBoVmS1f0S@auLc@tvM#t8uBxknP!9)NvSga0@|FDGhST*N4U`X(nh(vil3B(Fdyr|S$1b=5mb&F5(hsdx4!DW|Jyjg))JHMP&A+;bpPgz$aogddR! zh9UfDzRW$Gj)#(!8_ckrN@GaeP;H1mj>(=nt6cN?kx4mA2KUZX>d5hT4gd%EtT8{R z8eV*7>Rg@$tP>t#6a2e2(*QU1DOBm<0|pqBapnhXy#>>6*gCu$ zM>)A?qKU)RNREf^&$x~Vw}l(dzcf@w3K#n$Tn#tncuTUUC3!vw7W1-_I;xqutW@-p zN$SqaN~s1AdZdrMQ`nFJ^ImS)+k(EnH;q$Ho<-qP8CT>>Zfmac*4 zD!A}s3*PkJQZ-RCjf0f`6s>oqM-)@0(6avXHfPw6uo_lvc_ul~dNQhX+ z8_1!Kek9a}0jKpuL=Suznz39U76i1GhYC3NcLp9&h<$NMWCs|lgXHxop(GPyhnql| z1P(TVt`a!V1o}u|00XCn;u7d<0>u*Ol_M$bBv4@jdq`kH22v(ioN*>#a7LPd!5Lu! 
z1}97c6D>{+QznKCjyxhWF=TLf!T~9Q<8(Y5EB>Lj=K31{p{CV*iI4*aPT?kS8+%I0 z4KwuvlH;QpF6|7_#PT2-1Uk^v=Z4A<*rYNElzRiRh&^k%gp!G4#yPR)l>CKf?l3fV z{GE*S93{0*>`mK=G`2h#3b^F)v9(q1XD6*7P!DgHKyb)hTLNN+mf9$dX^KLY*R+NNSUML52bQ5DYSVe zrJ8>Pky{8u5UVpQCRoB5J!X^>JJSYn&nS%44wh;DZwhOmgK91%7ydh_Ha?dfR1cbl zl9~KFo5&)x>)Z!&f#1aj#$oFQfgGlkoZ`fKSawID9D5iTr{hl;-^8|?Dj)MIYzSq& zy^P*gLSTDG@eAecdx~Ee**<`L6hT_shmeiZ$;|t6sfyBb(b8gHh(9@L;ut6PmGD>E zp9=APeD%mg3|p}&=|xpjp2r?BW-PodxGcvLw8&&8OH;(r1ajYP1Bf-5=^ zzEb*1S5QkfC&@G}#A z!(DjmpP4e@t}aqtQgwHks*@_ctH1E@1L4;jq0h#MDE6rMovyc{gNoe{S8IJjH?$Ch z(+RZJ0CoJ8Rk^|x^F0DWQ&+U;%vzIHd0~pdiw%PvRT3y?6;VvO!`}_cS)GV-WF&Tu z%YURgZuE(#I#L`pA>vy#5P$np*e)kF%66-TUjrh}T1Z@_(D?*2ZAvBn*6^?N|H z;rI8<2K_$}{tWKG=bx$0`uIw~%6|ns54dOHWU2BUc-~@QBXE9$ z=O;L)j1=0`8Ciy$LrCFR^KUYVb5U$Kj|-iG)efO@IGgbu^#)BjBZ)RDnuP*M#?=7g zJ7JRX5%3IQfEnKc&kb;f^-nB?kkdd4|tmj)b z(9RN4I5}$mBmwCs!U^l3=n&d z^Km>54$}?R$jFpvtq-Fi`Q#rSorVHPF>8S^7*5EIfTx}TlA8_B3^=F9uTYa0!9ufl zP*8<}u0Ztpa5@Wt`{22QfkFhnh35+fS|f1$1t14k&<%kD`rxaZ6*U)``1$NDfbN5Y zj?2*D!07`JI2xXz3>=EU_3&H`S1=p_ku0AB<3~CFj3R1Ed;q9OVak`ojKN7^_JpT9 z11!uGcqYLa3R40K-;=`hc%x6@48Y7I5~X_~Jm)h&3OOra1mO&YD*_5`CZcA=AMz=5 z0!#-uq0kSWz2S`hNJF{HVrr*|m~%|6z5$>u1Z2Ke+oZ4=BUn_Bb2<`yLA2Z32=ITC zJ^}bgaFWr#;Q4_8vRbqfbp>ZwJtCmcj}#V1$N3aG0Hz(BVHKXe;7sO{_Y{k%okNIu zR`ePl^F{z(?_;im=ia867X+9iiMcfTwvTzlh1gVplRU@4GZN0^*_~Q@Pk`zBB2s=( zIo~wQV=uxH6}WU!^(fwswur3SybHByj=&T^oC2rYB5)x*=QGe9f%Wh_$Ut8NUVw+M zYZVMcKnh)WjLASR7xCA@CaA|@cI&?ew zQo!lM;3-C80o^eOh%iq$&KD;4O!K$KKl6!o2V6HeVPju-su*BFX2LTa&KN;Hlf+Nh z$&7)*Hq%_rMEW8)VMVYmM(9Ff*_^Mo#RxggiFa9a3^YSTcoU%b2Ds$>H+b%2fH}Vg z&ns|-_a_4pb|!_N+^c*F9{}ckB2k1t!t)JWnh4(wL|8@4U)?u-%tJ3lKL#h5qu@Cf zE)6q$tdsz)eF!oCa1-wVm=(VjfbaU4JK*^;z?A=~Yi=>Ma{@6#%019Wz2q`H83vbD zZwD-dG3{YdW|=&lZd*~0XCYw?fNy{+ScyQvYJ5p$?j0~UnOWQ%f56Y-1pq$}C&l_F zJnt|-k@mhE8)R^X!W>H>)Tgi=FrN^KMfw$< zf5D~cdGdHu1ns;?%;%%O_?W#`VT%n;Fb{-hKe#l^UIFGmiTPr5FOIp%^*jKc?qe>4 z=iC5ORA_jB>H7i|dc{-(|0?q-B9UvWLdfG}OduRv)R6N%vJHP5eT>;QBIO+*Y=KiV z5y-p}iFCJhG6g)a85Wax&*fAMVa%tD^PYM2CJ=IpvjF~^tBycy1yH-fsX++r3r{~d zc|oJm!UKlzZ=2`oPeSr2!iO+IyAGZ!;lfwhywmRiGq7FC9-Olk z$$aX~zMR%O9XDhE=(Pd{G~ebIz}Mv8=A5qgV+a}EoYcH}D+!zTAca=wLJ;4V3w2Jz z{;eG6@vTS0i_cW^gXtc{DAT>iW(2u=EqirmdJ|17bxJHaP@+_q>>rzcNIDv zxar@ufYB3mnTg;uxO}2-u|$=VM`5mZ7k>niU4WGH0dg*!T7thQ);ujGU zPB$Z>Eh+sRQ1X2k;>ahwH2%I%^<}B(AtLBiM(Cp=mXNM(dD%WUqq9!~zhthc9=|BLnZa`Jx{Z^_u+ zPQFs&ujWiTNsJS73^FjrS}!n85^sBfs7pvjV*zoDcwv*c{2+;Eq|g_o(3e;*GCaq6 z`#AaMiPsRFoCvaQuwG<4(|Un7TfE4M14PMcb&8K`4IkAURY}@SDROI4_*o~;mAtPI$|qy*$%OYHAa33L+z4?u+3XR5;WFvKxFw1_|(34D%%A<#nt zYs(Q>qKe5uoZI~fh_qshOv=8bh6BvmOU>p@ZUdQFYIcZnFCoxU!%WGHkyD-Qb5#}( zo9EudEYDT)@&yWp!1*fYH~h{R0%MT+A+W+MXk&f50exon-TrP=Yf>tF zkNh5|6#3Qn27Df;WDnXYtfi=qwvv25NIzP&6|r-%J-g22?G#>0whpz#)E9oC4>g5$ z3ZKkE`azahMUZ}wYLg;1*eAA$0kuQBQwD8m0 zQOTgng5NPnPx@jiDMNbP;tvYa<2JqHT8jBMQ)N?;+)9-Dx0a;WO6vT#E^O&e;YPIC zc#+MCf66PWkC-H3;}cGr$mCWgFme_H9T;#5-v=g-4Bvo&-bT#4LWya~nE14*qnKR5 zlhW~t^!7C^PisJp3Mqzk^RJLPi{WyHQstW8MSdu|P3c###NVxLQjG=T`Wkks1W zz<9H%^z*6AqG=5X2_klwnoVtCgs>mjDr{7Wra|*9)c8n-+Zy5S2P0cprHqZ^8Q3e%tUH9|0q58aFboLDqkwn=4kgO_UZlqh z>{87LHxn@l==Vlu&N8(Im|1r!rTKtad-_dO2C1tp8>p!6nhF_s{pX4FUb>ORkB)1 zYGshx$|#J}^8(^#NGe|#G~8q;>Cv5zbD;rQTNSg>eCb14Le^IEZd2_)(Q>oyW8&38 z+I@m4J$%+iqlvXeppv|l0(r=XOI~W!x#MD_RefcY^bMxYK(KHRR%=HWwH*D}w1e-M zHVZ+cAJbYQ@8}!CcgOQk!stc=*Y{fkw^6DQ1EA;=MFq$i1J&r0#y8W6J|#Y<L1@`Q=n@hDK{kF;td0Vx%o`J)kb zZ5_r)nddN#7f?*vIYtR){)#5J4{bNMDq7{BTHJlay0?11wfI?=t+vCiAf6`LnRx;R zBh88OL4eX+-9#`iA7IVZJQUoZG&d-{`SRDD1Bs2b3iE@>FPC%V-|h3SM$ZK&e{A0c zo=yzx2c*96^nsHSXi;shCw#37MFBeJt1b<~;cQy9m;4qu! 
zY^*~kdND;ZKVau^vcr1}@)%Qa0(=?S7K&H5 z4@A1GzujoF=X|z@Gq}*gOw1#>@OPG*Es0>355V*n&wBG7V41~w$S?sZ-1sB-xC2Q~ zUX5E=aOxohV%K2N3V|q*mRO{e&x*!T8D92*BtV=R;0%HjYW468gFB3WwdE7=(e$&3 z9FNqoaQnQD0M@+pHvG;)#0z+6W- zE`IoM`0sGSX%+n#rIPiS4S-n>m(LZCmX@A!ZYJDx_j!VG4d!`(JPU{a^|COs2IC{h z8cc+1Bb{w>$a#Py!YW!qmOe$&KY;%xT+U((!$Y=L+a~ksZ};ru2eELd;lyeKf%kWdk0n-waHb@+@lP{ zT=RqG!j)rL6TJ`&oI{SdTFM7-H^B9{0s)22@>YQ9RS5io*q`9qQhujca-qOS&EzLM zKfa8VSsAe#P=0q_yP*P z@DBp)KvJOaMd(K(bR=AOp3Q$uM2U%{Jmnvf!A1C3n`jnG%^`PLG7axzQzoMyVkMbs zu%;Nsn(OLqAvydH`0xBia00R$1t$yoH^Or{1AG&H8$55o$w9Z1jyC1T^*7*$?}#4y z8^Czbt>ukqt#ESCtsgvl!G-w`lKgPZoty-g9(%Bm_&GK?Y|mi(74i_$ z*Aamg@*F(RFu>OE7Cdjjg)g&=n@h4siiG{Eva)UF1B9GoShBmKGGh4_xL=SMx6O)g z!rC2N`0`FhNa@>V@~_TvQPb%?Xqw~rZ$7XO`DN!X9o^ zQ~dnH&Uj+Cb=~cM;P+D`-2s@};L4nDQ*apq7u}w#@({Zb$q&Kh5m~oD%w~k1gVXI0 zcn6-h80e0`_walHhfl9Hw$wah3a#bV3#^=r_ht5IkMtIEr(u|EVM5L?iQ*>BrQeG9Fpq`5V#(mYvE+D z`?VzqhFCNVcJ~948SFB~40eCFP#o;;iF9Ng)jJJ#Gft3P9Xr_FW>MIIq~zLUuv>Nn zdIF1;V!C+7K%)NvKyukvM`qE zwl??|Oml`X)Pkj$ew|EnhVUMEe49*jlJGBhe(;%|Y?1J`m}UB2A1UKj%+cWTm|~cp zXQ9db4S@SIi7&A*mif^(_!rD~ISlU>Sg;iH=V~xtg`^hXp%6~jAkYh*o^YZjcUmO8 z-ENsb2}njw7-N{;XrajTaHRhk(@$F%%k=d&_!ms`Zs1E648uwJs8yGQ!89+a4grt* z!sYV3NF3ibyU-e($N`9CSAswbILWRn zJe}ck#x=>#nKk`tK#{~MPJmP+f(HPuFA*<5;0$4PrqgXWa0#bbg2C_% zgxiND(aZ0gFHh+EIfa}P;-J$Ic2D<^6Q9sO5x~d6$rJi#z;imB3~ziS{P|2A#d*=~ zMjQ@rE(gfPa5B8PAD+A6!d14sO@=qz7?3f*&p{P0j|12~q5lF9pNEq%z`O9g0~fx# zlOvy9#sIR-ASJ3f(SRstO&sWp!)_I2#}a)75}(0IiQ>1T$A^;=`AFK?03?*?55Ghm z0MZ6dO0+*b)o|gG-IYlGglK%gg6}IbJO!m20V4IJ$`Q^f@SFsP*+CDB`3%-^P6FF7 z17=tn)k9A=H1Gx*YtPBpWMSE%EwHfDz+2ezW;;3zCh?Jf@4Y(U%l9P>O^i;$(JZzL z4rOP9msw=)CIrrb=PU-;n6HKBYPj&dmQw1}3VQyizQ*b?-)ZZK;=F3BHk9B<+Aj_o zr)5#rB37t(GElv6r)96s8aHO*n5iQhCQYpv<&5Rp+|h7jxvsYZ?lgC`?y(2Lg$To8 z`swVE2o?TqKLkR9olp_~UQqW+MTQ5dxoST0cU@;Nemo+8H50Aijg+W85{Ih?_H&Oa ztc%sV!(xn$2C;c6KO+^N>0ZC zCp=<=8-peq%6kNbhTKwx?rAf0_bFGxVY%)l(%4f9hZfR{wcV(Ssq%Y?=mTC%E9qT< zOwwx+8_I(dEKMI&U7$X!x_zV_p$L5kJ7=f_n^wOtbf|wURvsRlFf~HV%Agj@6N3}g z#g)ZaWwDAN^-gO2Gog-o5Pfw_)Nw#LgxMVl`!*5KU`W7Mu>G0{lPbP{AeY!acm<|jWMGGOlgAyoDES4(DAl8g1PPw8jK*z~8X(d{d-de2L zsST81r_2s6kUAnyLqxRYImSXz8iz0+v|^JBVGd1+p*-H9I30Y`XG~p^8%x#!?oQ#S^n3BoQk%PWG18J|5;JXcp*} zg==z2U1tINltVBlv$C1lDMn}W36O-6!Qh;_6OVyNC!~}uS|1x$p6fUheZBA_Cb1}( z_-E3{nYPSEG$n>7oQ%Rx^^1obp*WzPUczY=fpxSN11kvzBTOEFkq$oHu+OAa|29K1 zmwM3HGV>dlOZ=vGXcnzYN_9``jV>7@iagaWI}y{RElQ6n%u~7O%H~+B8o{`d_M3)$ zZmKa-lJj8UQ8V+om~e*LODbc55<`bUTBEWW4g*8zh01D|4KG5NO&f;-M1urqv9h%% zRtZULI%ldTRsjQ%CB%)Sx1Y0ASi$T;Bq>vIwo31KKn$ZLl~&bgJn52Ms!E`1GSFbO z8B*Zbh7vmn(|w-tp={@4Sd$tnP=XhLaH{#H1iZ|TM}ayNm&HtPupA|VT0`|#sFYqq zj5Ee#Aml{ACx%W+{gNH$4+4&fD9z4-eD1GfbZOTkVj0P#i2;c$!LE{A&t zZVOyUm4zvRr(H+u+Ncg)grweS6_6qmsTM_=+4&-za=}r_eVO zU(p5hox`sS@s)WkzGAoGD|$cU9-;49e!Yq>=Use-KIYfg_zM4vUmo-|T!62vR`_z; zn;KYksGuNeOr$s2($cPxF+fq*xWz6tn>k~Mcad>QlbD`jZ(<(zBJDJ+#toBfjWDU`{0>z?HrmBz4kLKmg?z=O~H2K5AmY zmwpW+Z=H0@J$>tZ z2>3RjQtciWwM)|;>c;l~rfxK%p6;97$a?x8=~L^Wjlg@!K}DlX|Bv-Z>DlB@^hk=d z7RqY$Xo%Fqd1!g42Uei#T?6$>x`{#XE)pe+d-`*TtsJSvHAt*-Tf!UnYVn<=-Vl^tR27};OS8{m34?uxJ8mp ztA=f?jO#(6t!Vq8N+|e_xfS{e8VhK-+uqZI6q-OE2}9=}qI&H{>Dz-U+DK;QU#dO` zKl4|Ntph;bUjlf#02v{(qDqJ?awv=AO>(342tjhe^)&EcP0_9c> z4WOsbKz^O|sDC5U<^N8kTTn7^U&L12%G2AOP0P>(&^`G7)Tf_C7`aIIknr>-z0L&D zY`q-U9pb7hT(=rVN_j{=dtM_2S?{W0Zq{@!)Fxkfb*@6n-E$*jJr$`0sSgt_&9qzZ zCi@{+<<%L%H^)z?4htu+6p=CEWun{{@6PlQh@tu7c=5T#Qv}#z+lRQ{316Adb~PgpOA`jM8;Osc>06WBKfYHKt!IK_iY0h zLt|@(+#<+rvtn*;mNB7O4Ul4_9PyI^CN{a*!a+7$_cYf%hRs)EZkwmP*8!Uufhw*I z)gTuj!|X%I?ldpdiC)kd?&kGnrU{CWm2$$qw$J_{ZYzbI9r_65j2x=aZlj>PLVDI_ 
z34D^KXHxo~afP%ped*LP*o`{KN9j*MMNg?P^=TXXx$Pyj9jvb|V#rO%JAmhg_L=p@n7q`2qeq3J+>CP(U;fK_JH!@9XZbQ*X-JY@x-k?h+5?TZ9^+h{taN*Ta0tU&rBB*o0=lq_ znN_kZ3+J;1MI#A>`k{c{hcN1P01Sy7A8CXk-2s)Nfa+}u{K8Xk_hF;B0bz6`}kx}~0~a(i`iV}0Dx0dB8aH@n7-?d@i_ zt&*yN*-j=4$#Vn8)|hrw=xB|b-Pi4Sn45Ko+d|K*bBp$L+YNN1{oQ?!c1sU+qx-m} z)vnqXGqqWDZtrThJIcK;B&uJI+8*1lMh<2?ec$v3ic~ZZn@=cD1;3Wj;(PkJOvRCb zzJGe7C!Gc8MA%&+vDR(NNxba{KgQ@)Izo0Sp1wGP{bRP?J`3Zb|8ckIwq@RWWd>+% zGXdz0)XIHsq|9V%t2Zz-P>YGoJDi0IeF35Z8%ht9-zW;JYmFEGN-VbEIvIeXJIInr zO-Mh8K0Fh{u_&5?DfTTfwqm6hNeg#{%8{hf%#3`|+Dwn?=_}BynK@QH#=7VtAT@OL z(8wC-IXdeBgKTHLViwx(4(d}vSiSl5-Laa94og@)#X?B=PdL4#RS^5QC z$(foTNeo6(5&XDyC{GoJKZe?~DQW-`7Vg<;;HA9!NJT|mI8532hj8@|SQfBDtjKzF z*gNLcVR(X-2Yo&&C)?|anIY==D=8xBdUawQIPi;emLI6FJ`J;=35^`hUIf&*{-@xi zEBG=dUa8b_P65z7ia0g7U#tqIJQh;F2fVm9Iz^e2v}ZU3!-=Dsnlcjk$2?Akm3}Ex zOe9R=w1`*?H9X=7PUmn)67-jub#5H(T%&cEnCV;G(nH+%Bv!qr zH&5rVlbS$2^lB_bxZX@QT7CX>WRe8;NkexPKuYzF%*r66^QSj3L+0k{pQI7z>!s6K zo2AHkEdbdIhd~O>-WJB=uXPL??2Eu8Ci5xaurl)FQq_Ls`Cj9(4 zz99(5A%hK=v&%FQh>wI7;$+yPADIJVXC|$ZAYDtyQ!?16tqD_;!{j%NDHFN~=+j_V zbr-;JDf^x`0B_pdI=&>n3%mC0>Xk31fBEqwaarqy$*hs~0 zS}P`rD816xD6_z_Yja%yMmRN94~cA`^nqMlHN65RwiM;!n!ub!BX{MNlO}LD>y!d! zQ`s;*mAqi1+~Oh)tuk}}V<~^>0K!{*>E7u}_njCCHAmeHM(@WSvAIb5btvNlruCUV z`8M{+Ybmp*V8i2PVk)NWvnj~X%oIRLUxxf;jZu$_NK4*Cq!NYZ0;!t@FlBWLBd%8i zL)r;4eHy@bn5ICoT%17JzsP21W%PRG6!beru}vNRD2aLcoGA?rjL|=2H&C^a#~e{{ zpyumkQ`iEoO32~>^04gyS);E8y~t-lBUm>U<(q=Zr{5@r6Vo8QEs$ntt;zZ2>`Kv| z3wGNz{iXIOWAD);|1VAbr!jVK7~6=vF&xt+bz8j2O$?M3_?DEwjY+x+C5Mrcz;rQJ z-kWK1cL#7PZ=}B&piqlqw_NPS9R709caK}{88O%CBs5rOEvpmTKg2fO(b94w-C;b9 z?W9SUo^MyhF)DKgE}22MX;*}#%+wi}uPgH9Ebx>lS*I^<%uMdfP~c%W~;u=;AoW z`HSV!P!}$JKq7CU^2?#R&9=DZtPQL?!@@NqZEq;zRbs3e>w9{Jj1{~Fc0QdW=-3lR z{#0AP;ad5FsJetUa(06VeA!IHk^a9kjJ|5G-i^QmdH>lc#?v21R5%9;^|+t=iR2=Q zQ4Y!|wp&*6^yUc7!*tG%>;lR%d*4BS?pV>IK%6tM2Y218>ic2$eaO^K0sRr3a9}rCu4C@m*N}dSigyF1`G~YS5zPw5?nL` zCIoio&mcTK#!d!z!`@y0b@m?HiM=a!VJ`)N<>0?&@7e!4d)&H7m)^a*u$O}Px6*t6 zzs}x@o!EPB7xq#R|5kcGH)ZeO|6KQeLPJQG-nJPS*fr6;6ojY0b^g3|Bg1UtSlE=o z|3c&VdMEjPF&#Fm34WY8u=HE*D-^U4t1wem&dHW6yVG zNU3+sE~~46vBaun5K|cpFee{wd#Qh+@gf7SRx(>;MuIZJP0#En5K7DJ%sDnQU^9Dt z)*L^xR7@(ftNqL{w@u6Jr`?onQPYzB;+G8AlG$%T$x<vp|nsttZ-qWDXY}> za$oyAV@Eele&?jg@7px_otG{@B>ZRcTiR59{-6KqKbq9#Z)uapGtwrFXQcCjg#V0} z-y4#)eU}KLWh)oJ&l(e(|CC+eFBSw|BRP6EH7Mq;{?`T2E_YoF{xT;6$-ppVrhFM?5HC-9^Rk4X@Re&nv0i9Tam^%D}A%e>7V`F>q z*T#-@6Lsz&L}-jF1+LPSbYM%6^QYk$?++K_&8eu;?drI79P?UjxgiIZf7y2+ItgF zqF4(|%CQvIM{^trn8n%Y5F}u|Sqd-nbXg^UiR2N6_hjX3moYi!N>LGuUWp|#dt5?) zF;AwvT!jJa7eaPPInT12-F}eAAhHg#d_ZF%cC3_u+oEc$v6TL+nwSlN|K`&%Pj{PF z+2w1@tjuh^UPjBSv182HgEph(a2~vGq536m2!FW(qH4gLt6!SSA$G1_9Kr?(B0T*I z4s|PaFse-OsbO3aE5ST)^;)b76>{EhXbSx-5)1+o^cangKFE?EIAr7C{YgjI=@Qy3rHn& zHIfD+tEzSk(BI}$_PKhKQ{(0=cXI~^DGwMuVGV%! 
zO+e;+6q`P=sfe(8c8{-T0C|@5`@1<<%^Kw9AL8Z=3TWLcxkUA5Cso7ur1TKT&pXOg z>z%uBPX+-IhEsmdX0i zx6GIAC#!4Rv!FN=RklT86EHCixlpZ?zQHeXGf!^?D0TwSX8XJGscs_G$Zkv(4$YsY zjt6w&-g=>Ntxp4KZzHW`3h}!6tks16N1CLrn=hS+G!m@;`;xjUjmo>JgK_%WCV7)b z=(e@{U9gEE4V;xlYnG#jEZZbGUN&DvxDcs3n(|{&UYJFEnozJFn=|r zVAc0)pWL}k6(YG0!s}t}-Gsn6yQB7FkL_uJNrLG?K zZ6XwFW17`{%ve+Dbv*qrY>qJ)I$q=ULOy*O+#ES8CsgkAsX(GUYs8A^nqJzw#9-&> z7t;(j;?nB;MO=FkS~zcH`T@{-DUG8$=aHEr{R*sE>@+uLU{K4q3vqxm$*^hkNHw7+bXoCrqjAARWRVa4-mtm5$Y9b=>H^oiiuSs)R^9u#>A?W zP(f^)&%~u^R6a}->J?4${uuOm=oiX)-Tc+oA{ryZ<6Wtq6LlHaOI3s0vf7PzaTBTT zYKI60CzqicY5n{+O-2jnu^h;UqlNyo%{h~iCnhPh-(UKyf{P0Pm}a!UvTD5&#G8&I zzD&s;Xv_U#f7XO+zbOL(18eW8@@eZO=7UZoUv|H7W`3vuZEWfa4z$8K@X#cuvV+rU5q zBkE<_SR;?pvLSVsk=#0;B!C`A{w=G6rn+vP%nGRe3H=n(;s>Qy);f`ATtA&wo(Jcx zrJSrKdJ>W})pCCm(fEpvES~{cSb-YpnvU}gm(iNPH$f74-kc`SH>^C@rse-y6M1s< zW#w5^<;Jdr9BV}iFz_*r7Mr87?d=QqWnU5y?pk2((m}mq73g+V7s%)p(_-q#9nN?A zs`Ek`tvmjV){BoyUc&%CvOksPnMo!-?pINp`x=h$Hmx=QT6K0tB?{qNU zfQ339%*m{*ar5-_GLG?s<8J<=DEdn&B1*yH?Q)*0GfoA`d9IIX-NB1F&~@>A|3uez zCv~LjemT;$J5n7nh!;{%IO%7@9t=o5og(#`5$vC`RAK4#N%8Z#@yc97jxim=vt_$u zq_@vV*K|G(^Gnn{RiJAcEFHJVu#&&tg&1*?1;^P7*=+vuI6JQdL@?n}x}5gtG~|0c zo5TsrFcaGCf&XRLAje7C^t0-Q5KaY^ff$aO{W*=aQ}S|howN9L8FHQrbFAs1Zq$v0 zTMVwEwQl}kH&gm&qo*RTH&LMGpq|>rj(baGXATS}^_%7lLQ*c&;M$7dq%AH;Z;hRP zPj|xdh@>tA+Bq0W1ZysLKy*Av;Oa{vIGzM6`teL|*)B3G+AFblVs290Iox!YSpKQy zmbe*Aq%nJf@U-c_#gm&j+Ghh5c3lKh16c_=X{=P%2Fb^hZFq?PF`T=Qky@Qsuj3U9 zQ7XG$*66lill7M4JY-~?=Lh?_d9^Oj9BXfwfiOq@Zl1Lc<MPvt4C4?Q^U~+ejM(W1jn8+Gq3eB!HJ!2WBmsp(>PAdT zpU28b%5v0lSxn;=u21K4u339CxJ~*EI>(4t;om~S-D_0U$;7p*MhoSx+gauL8P|xxrYK&m z*|h#dHt|h)cUpc!xpRy<=_4uZtJp7cS42!NMl|M}rYJQe!Xq;%4x}*CES4BGohj|f z#n6O9378x%7vp!aXoQf$;xQI<*Xfy&u_80Mn^NNrw6|0SW8^lTvU++Zx_mh_uOCDe zW_kLnnKGdFIIfD6b-tSm7~&PH4c!nZ%xnElgD4ZF6Aox$e9gIJjtu06R7w_?0g0G3sy)=72C3(d7xb z+*rtKlsJtgvL}^#sNs{e6ZUE&=-w-vP39B`8W};^M~#D?@OB?^;f%ZGA#S$=pw5^7Ty4_HR!jTcPOJr39Qn!7!?a8=sf?Sa$tRl^Wno8&Q_?r0z#`?}q` zyB+(x-TS#Mk9ND)*AI3(4t87CxE(jSEpbuKi(EuV6ocF>l$mZPkitTH36mE}x0UBT zE0uk@Z2rkvZpa6BrGUtW)&{?7JuhOZS}erM(i&PmCO75!;XtU6-Cy2;Ym_rjkHCEA z=`Nu@FnX-3Jhw|4D?ZW-o-YC%{rE=3%NGH~bAuKrvdjq({V-$%dG3u>I(<0bD#PJGN z(rQ-eITT|*2;%2J5EQlkBSh&L?Lr%dY@k<9$c0qViEbRl&^q}|kPT+2qr#nhJU zEe3}=dJOXjDc*7@v=FP?*bVNWpAvBE8DpmOZl&qW(*ekD@THS7tcL9oC>*8l-N zZ#4rs1Nk0k?G{Y^=kA8aus@}7zu}%e3}`$9VU+Ns{xZl!zZ^0p++y=#g&ZW+uWyEq zwHzi~yuNGdO-wynQs3M)^#-PLI1tyb?wX1V_df35cTK&8spkIeYrCf2%GBXP`ITK$ z(JqAfI+kq(w(!Wo05%C@=55jXiwKHA$P0k)K7p~3ed?0 zh`U}p<;P_W$Q=8($Z)&?dw5r{YJo!A1=L9fYU8d@+(`qmLIFF)fSIesP0GkUHGmbM zo>A)yLr@tGhpHf-5Kk52nlf)kg(}eC+aaMC7pO5T(oX}grME&P_r|UgzA=z+TwjP5 z-_rJ_`|VQbO3A+k@<&TL8!dS^W(2({`|Nc4!5*oXm$w$9|sGt6eAiazVA zzVba4cH=|#)fgGG;yLoIb#t)%P@vDp3Ih7Mv^3Yl0McAt(qGv7RXd1gl!p6k=7P!{ z_ME96Xh@{)(){ItW&eT-h9iG|p5@;h>)ZtHlo#oR%?(_6Qm2-2;~NKQy{lOyEDA(e zKEvXg|3@tT^-*7Jkr^I)aO**h7Y>hxp`N!Isw0CjEH=wQDqe5%BA9wMN0&HD45VCT zRx)}w5{GnYmJNbz{TAsdX0+?yaKkXaNG+6;2Ira%X6MttoTY+}{umdp>}Hy$FPkMf z$lO7Hnz6}G(U;8PQcWJ9Lv4s=&Pw1{PQ5PRNtpl98P~Pu5bI{N7I!gO;59rYM9$!t z?te?*ZWqEkK@kNkWFr8UyOx%%0fUEhBsUg$nvh1<-oVh1gYtnW&b2L`*v8O zJ8-g^M?|hRI*fLVTZ?TNSE!Gz7WV02v0g?P{6jV)ZFm2Lr#H zA(j%SS9wRuKeoP?@boW?>WeX6$KHN}Tf(SqwD)FOu5t9G)YE&wgz7^p5wG`(tPS&y z=rSpv$nG+jR2WlPhcjcII)Gqj@RNC}prAlt&|#wAQ;0XW1<<8tde=yjoOCE)G5kah z0FlZYh8T9rYU#u}5&cOdBeMjGTW;{`1-Pw)LQll7EQzBD!V9u;VGxtvDjf1+nfxaV z=;sRG#nc#J)h&aGF+(`J47(O{Y}luM{_-n;uzEw`-7(+byow<25GNZzVYyf7-KO&@ zSULos0l6Bo5N?KCE)*f*Ap~JTtd3ow`P8b(%b2@CcIy9YZ$gWBBbKVxQUZHQ*$%-0 z8w9%UQnHz=cP{Jfonz3Swpi_9A@@$yAez9XGgkZcc9M8_9v1PpLet`Ujnt3~Auch> zFN<_|a3Nw&v+y;exSfd#*a`zyw+&>J{=$O2EMRh&ifbHkeO?o=9T*#MB>tkKG|zyb 
zM*eygAH@Ad{7PB5zh-9o!aWz(Wlis;sn)_IU|`UP@nScRAmN-5Re-$)*HZCn3YOH$ z7a*cmmf+;TFlXnH*04I9n&U7Fe)ptqtd_M#Ov9m2cr-?Bp>mqL>jhZpO6t|P`uXoy zgE^YRcCFU{Cb(|gfz7`j9>wt`YJFCy3&*`l{USBftO-Yl>~idEt?)#NU>^y!hE2I} z%+1j2W?-Q}ZwRsLjOnv8c<`qdDOq}l=y5WE6$-sx`eIHZGe}2o#Q2T}5BXM4?hrQ* z7qdMaan8kT#`R_ac>pp>x*0%ly`MnfbDsbBy%>OEV+jhhSkweX z;L3Yp5=R%fa+eqBwVHREm#i0)*RwL9;?X#O>taLVO!N z8Y@A%3`<5_6RcvkJjN2&JaqRlwi&3OqbGRU+B{TO-&uC}}0ZTx)@D-`=i{};e`uSsdWAF*EE~^3_+EP^S zM}TVbfg8hRp8gw{$iYTygPVCUSbTVXomoO^fCzq=hocKjB!z?R{Z4?O3&EB#ipFy7 zkLQzcyo?mbLvidf7q|M$MMZJSBwJ6zBOC+VY_sUaw1R3kd!PN>iYfzjC}8lu5OU$u zF>+7e5h zsMYEkF=NnPjDc~o5N8qbiW`Kbe~I$+DpqeBXW(>d>P;C<|k+IIz9>1 zo$~DGc3kdOR=OQ4xdF%(QRawsHQJ%>tXnjypP@2Y`UBHmkWMmOocbtvsED*RBbJ#t zA@BFVk7hkkk4SHFyU5Iy3L1? zw4mJs(~ncRF&A$OK8y7y{{-aybEUHD4CeDtddLe4h_Wu;U5}s z(t*A#BL^vc*?{p+N?+<-v@qCHn6Cc+Rwz)0>ri|)Jcx194y-SuH(*WFOEGTni-8d{ zUXlxn@$g)k;mTJ;Hj5|=ka!a%LiO&q585r-R~&VTO48Y|;YP~}F!>%4(79l)L1uv> z>-f#Pt=Njdp(M4;tCQ(b8@&djOd89TvhcOzF-k1Oz{a^UwWg zRNO*zn+?kvQfFZvoGE#WOr(-TWkmJlBnnL_H1}3aIS%GP00C?SAB>e`D38(!io1<| z6ITH9bmm$vn98iw|GNPF6*6QC#DffcilHpRoy7hy%09_BJPFS zCD0AXJnLy0&Y7tt7tccaeynKAi0Q4gJ$){dC-o0z4)b5wEw%;J-)ES!24`bqDcCOl z9vsL*o(N{#N45i&*OMCX_C`gSSy?;lv_{N2S+v<|HpCdIra+1y4UA&5tRV!;{`|8L zf|BW{pEgEx024I~=mOzH%7Gizm!7r_GCiEKEf$OCdecExFmxk0jAQOjvHv|yf@W9m zW~6d2`TtP&<^fg}N8j*S`ks5{&cZP4hzJM>D9ZpMMpU*zM_GoM0YSm*FdH|{#+?}! zMa12>M0n6dqjB(Y4^a`9#6%Mk^@+xf7^B8S#keGD7NbTJ%@f{VRWGN{ok2<7_xt0U zM9%5zs_N?M>gwv{^!0G{nSUDZVT{2oNv^+?TMBpQA#$g2M&8-;lKj3A-Cy6;b3p5h zbbaLj|799@;RtyrXL({kIa0K`tHw<47@Tb7B#r`b8-7zR8UEk3A&Q3oc#-1_%&M__ z7ah{TEwyYx#)k8@VwytXDBVhlbB_n^?3i5qdxr(9;D8=3LvKdP3Q-j*F2!PHVX1Z$=?s60`EXo%Vs^FS62^$6(b zj(?ad20(J#s^kS%V{F9XGTm2bVHgg8Tk!|~FM6VE z#rv?>MYHPX*2P=#UMw-0_Bsjj7yBP=AXmc2X#A@K3H#%Q86T1{{1Bmo>o;;`c=t9L z^yYw6E@!0Y=~lcOCBem5a`Dn`dV(7oBv-l z3{->5ZtY`FPhgRwVFEM&O@DL4`8{izjn01+BIbwJ6-b651w*|KNethUcwg_kXYqEV1D9J!NK3#mtNfdw|z6rOQNE z&Wn{eS;J=b6iDG7X2=s-=h+9eu3V3GKjRC=bBFUEW0#cYU%mdrU>V(JJO&4kNomW} zxeY!piu5nBD#jl{LLIzl*CBZaTbU7@_C$D9d19=GBVrgJevCW+zWbqa6Ca$f+{8`s zalde3!fl(;kT~c#j6(P_2c@NO=s`ZQgvjatZ=PfW7!Li9=v0kf39jP4@^>7op&`Dn zbwSaU*3F%f&v}h8Zz9v60oun#C_;H`O;48kZsE_B;BMg$vj|t@wWyjGtf&|c_8SP& zj1$Y3(H6?@!}}!PrSms9J48b%4%ZymZU@~hf=0x?OK?89Bqhx!_=y=F`beDC0~3cs zK8((aMTuOk|6D1JDh~d0SZyERE>nqic~s1rf1?;)(mFUtg>9@5slv8@gIOeVZ}eIJ z0iNA*i+8`3b^_gLRmPQg5-o~`gfBxwVj8LHot1lY+>kB*krX%@Tj(zYa~akA=Qk1F zWZl#EAKgT_m2M)uQP&LXpwtaMi?`@pR>ogVi-xo=xa&)G$Vc7$r=dfsx2Kr*Cvo0V z(SI|%?_#9C#gS&R0G|Mf?ul^krhnKL>Fz9aHu|Wdr~b+IC_TOGoKMEOkqT80=rR(u z>*dh|43h-YrRWL@?CIbVNV)?^cTmOozrLiO(on;Z)Aoy$OT3OO4%fF}8kE3|bQsqB z$*?|uA=Y^OBXA#-Urlf0>8*<12GLt1-cAixJj^~a@vuBlOFR8*aYdmV>wh3;w22Zd z`cM^Kpj=*3!=9yk<#C9QDfzIxE^Y=nSo4Go}t=8NkKl}Xxxsvz2B0INYN$%=eC zF+e7NNKD9XRg+L_urZTY@vzu@Dzo-k<#IU;p&${^hDs+jXrgerT!5o?!_pTZMe2@n z387XgR#XxK-<;@!FBgc9gwTQ}aCjPKaAFul1AAc_+1mKdkr4kt7(l z#MUMGK`RoBQ(m;u<4G_YNrKgt#0m`kOEB&nE{U+AJPGc%JOUaPxQ&>?PQ^kgmc^sy z@p8l)uEdkz;hHlFq^~ZpK1!7_1)?&vOR9LA%9N-M#J^NN>p!u-#MxVpJ3#Zf-i$YThh%nEp<+9XEs~k(&5xrb<}j!r)yhV z8ZwQY*?OmP$?WCn#Vwg8r*?H`%bDqVAZP2_>$7X?>zsAjnznR%N4B%JBi&NdT<>I8 zrIFdz+ESP9Xr)wLO-D_-V?$egUAlGESL=ayR!vh=D}F;VQ{PnQv~y|evKf@Qwx+4G z-f2FurhT>3QopWtb=GMv#6wJy`rm|jvnFFl>Ow%69Qq-(N`?derDb?JuI zY;#SAlyu(wblI}tPZ^%O2 z@olY{mX7)?GCMk3>W}5$xXjcdS(B|@eeC+_)5cGmI&6I7u;ky zoN|~npfdlAu34XMsc-Kf zHMZAxIBhlAj>%4CRdwZ(Io0VC%Br1ob91`6b!~lfO*>k-Ei+vzbndL`S?PI87S3@h zXH{0tTAnUjFuQDSdVbkLrY>K20>5X{_3JyxHmb@O%uO#WL$kHllNQtUXsot+G*o-0 zHl1m1YjRfCudmBAQad&@wbpc~hDY+u`VH$^vvujl%-Z^vbej~QI?d%e*_upyecJpC 
zX}Ug}ZO!WHt?F!OsL!S|Et!r?O;ft5zNN8aHCHsdipor9*#_Af+o<#R`-bLpU42`}YDv)nFtuyCy{4hwQF^U!COQX-%c3^x$SUz) zeGQSUM)Oc9#x$2Kgubh)%jPQT+ScYauyn{Gw4z|-)6=!9YamxW+Nx%4z2rA<_L6z? z$|}=U)wAZDoSrp*{-Qavh*JhVE&bK@))uF(AzQCHY<0SPLD_;uPABX;4IAs2GCAGh zwAW@cZ5=GWE{#o%WNmC}T?Ngpt;uG{`kdyfjw~6AVvee>)5=yIxuuB}+?dU@cfyp& zPgJLuESy_5&si(&P&HOPi-%fdYc?pn9k&7o>QwwjJw)-+6(igub$ z(7!orQ-Ndwh~}1}=Cx2XJV&O^X;2nG&FrMpYbV!rI@!)u8`A4pDe3g8&P)@m9~DeD zW!gKOs_OKdMGLFT7cMDt$jhWnVQFqz$N;bA)UsRC$|#dIZoDJR2jx4fGA%XP4RDRM z=)LvpjWCdoeO-H|u|-y)Y~dUfJGZQI*1{9Y(hFu)ootZ7w7s(#PPM6(x}XzR9$Bu| z@w{2{tI7;rP2Jj>mfCvqx0Q=#p9F2D7tNbjRfc|3g|4&QX$*?VK}Njgs(oWQJ=d zP33>7Zfhn@8BIf)iYYain=+ zrb#tDx(0PmRH<|l|69ucPUe57@V{K`(j%Sr_WH^2fa^ODWvJe5A_6!h)H_o*86`M5 zT`HZ?O(aJs2YsuxPB{`~mmS#+Y?Sk85OEsXz+!E>uA{XE(L;S*yY_J0nP{M+*0%;P zj%(1a_3Y+l{ac!}Tjv2ZP42UF8U!Ur-AQo~$3Q$pQq?RW!5uYkE}c9*Bc&qcDuv|? zoXQ4K`Bj~b+Nn~*uR{<;OgNoVl#~8I@xPUz5NSmc;@+hfCJ6-<;i{>tY z!=U13WgDe!A{uBX2-z}7*W#UrS&q0`WT%nz=*U%2-~6&_L=BxSar^3X^i(_ zkFN@ySH5iFk_F1(c$BKEZ-SSH$T1zMG9m*Zi7IWInm2!zF}`%CmbRw7Hj_!Wwlra6 z;uuWUwrcr;*^B04@)I2Zh64Y1gvmF%CPp1%r`ywd_x7GuhhCCKx*T zo%GuFHaPc=204y7X?Cuq)0}n$r0_EHW^BRViY=FnBmmN+bzb0)3p zc61F)qv|o+te=(1b(l2Q>aI!gKL_)Z zxw88FPE;DJ)8FU^B2vEV$b=rs;bxo$Vr;H19j;2G{?+9wQ}TbXp*)OYljA!s%hJ> zX7^Ic>hy`Ts!r@l;-hAl8xa;_CgQX=wRWJ!?O8;eRo&)%YSo5^GfdjhF=HqHa`+p(@kRp~wD48e1teL{L&at9)T~F4N3<41i8EW(v7E+EwVFTn_1a zicf0Wj~A$K?`)E@Fu2KdduJOATZyd)Ac%#@n$%8u*23lK%2}t7?MRiA(D8wrfKqCc7Z5vmk$%z?bOB#V9 zF@+gp=x%Styol!IJR3%n&RxE6)&k?{*~^j}tHJUR#wl2z^d#WGA!0*j{p2RBoS{3W zW$TdGQ!rxFe2JqC!m|d&9v6qm=Q*mIDxc8q(i*$(63hCxo<=cLVJ#(-+ecbcJL!j&~BC_jT~BPp^IF$QMWv(qLNq&2sO z3}|TG6zb#AU(Et};#!Zvp^i+U6;J7dtT~}^gz<)#@dY>Ykw?+2iCr~?rO+}72(MaM zRxy9poHDJN2CV+dm=!gN37}P0S$0C%vI;Zk$nZ`L!CDPmOEL=SLfE~MFCg`BgY&pd zeMb?>g|$oz=2Z}Py_~U-=Fh6nwz4v!L8pOP5UU8L7Yo~rTy?FmquMl<8!3opOM=3s zBjH7`wi+zuQGmpO6u8w^$;oY9W^J2OOa3Yi4PbSslbyfrgcPqxL!?PN1thDR>pQTV z8I@flV@R@Ej#bd+Y7x~A)Uj$R7kwZn%pdESQVVK)Sh#2*Pom6PK&FNjffZ${7M#O6 z6s;yPTK_T-)#|4u;dUP7GH}MUFhoW-drA3xEOoIiP({5KhWToJb1eoyED$)YSbb<} z#e$z4OJ#6qrl1b^!ZUVTU{V8oMRg_dZ9t^R3&2%XWs_xxqG>xugGMY+(Zn^=0?Wwg z%>Zgq9k5~v4G}AAA|Avnoi)E4>w|JN*$!pZtcI*&L-S*vriG9&Z}S}JbNw96HWXwh*&eaGt7I(S2JI<@ds^>qk0u;f98vxwK! 
zcp1IaSzBMreG3`nPBIA4Xl=;R(;uE`VqoJ59}B?7iDgSYlhO8yN>C?-&=`6qgTN?6 zu3lk=3r~uu$i#FMt)g7%%_TO?rzs+xH5+qsWm>b#u^+O4FsTzvW;1cf4zPW~47ATB z6z;Ex_jzFoBGK9xCv)Q*yESB>qt;m#F5;CIU3w+r{6!0A&6fihPskAPt6^5hfC`zH zhUZn5l||Em`OD!~q1q1E4#`(v4O5b<43)EKUqP2sK8J&szfoEN6Etq3AXwPuwN2{{EO=S5k{sY&I_ku5%S zwO|v(7{wCV7-x1{Y@QZ3lSQXOKasWbCKd*h`nJyLG`eDQ56d~cOv_E#F12r>?Mkww zy18a?9bIQ$ISjS@R1A-;ozyNghO!B8B-fDvOiq<41Dykrg7R=2E%7wX^0CsxY7>zC z4xTRKz#`@NB|5cx(w*wRn1A`Fy9T9e8zye8QM57ienbd;$i~QGa|8C3&bt)gO_x<-_|RROwoXdpyCtRZ?UK^?eo5)nIPH<~r0N+kVdfm)GbxR4nv@u8-sPY`+2m$*n*p!apv zf*A&C>~qo9rJ7}8)nB@9=S;*?HM*T;m(pVo8i=CA_RdwZGqF02Uz9!(LDn+8R#g>S zh>`Je8ezMhrpd_7Av?rahz$@Kx2PQvH4WfGs%7kk>QL7wGm{e>vo&Hy9PA=@WZIfG z$PgPtZYIlmqge&cRXB21YtS(pu8rD*_i_QsI1nw{N-NxI1s}k6%p}miuioLeh-&0ws+!B2ZD+OWgna|-eA9xX>20(~tW(qq*dB#w z2)MAQF4rdExoXwMd}JmxXKHP#TPp{DUKQu*E2g^K2Q$zQ7JYShkri8oO#sXlK&J<8 zgn`(qr3oo5Ch}H^rokXbQ6#)BHjuwsUz5SQH(CHYL8{%b>t5!dkMe;88p5$2M6*3P zOJNtJYKickmMRH@b`t1p!wJQq z#WeIb&@^z)iDh$6cIp?(Qz0^(>g2(MMU8K^sZl{TQZ!6coQ9!NF09KPqG(;+nvA8_ z)nqX_M&nVJfkDu#vY{5`faFx0tOYf(*9hn|5O9s@N=5T#1l5^Wv1w99+ksM%%7@ny z70cFw*-dj>2X?V|!lSLYS$7&7=iqZIGy#)plAEJtRdZ%x)r%Fb4`A?=Cpv!-w_BGoXtxqdu))x^>%6IDz{1j->u8U^o8!^1YS)hHz1cqtxBO2KXxJ|)g*e^`)!&<_xq zT31fF50Idi7RjT^O~k8I?g7%9&T3cr3u&?M&|EiRH5QDUQ8gF=-j!DaLX0OZy-EGcqVfb#ALq3Zz}?`pY_-e44swRlmB$9R*D zaXk4&q;w4lmxpP*$TL9g1cJbr_0bzI@Zeb=pAfRGVJL=#}kVy`Q=#Ycj z1F3`RuTnI#iIRYGJ?PoYNcbm98x8c7<1@YSfULkK#6||ioi#Xxg;$zd5GpwfJY~R+ zmwFU{N2V43te0L1Z)#@?Z#1~loC~jTEhrK%SkS?2pI+$D2xXD!;z9ExJa*jeI0+|| z=T4ol>|*&Q+zNgPsx`m?NQi&tx$?&agNn-)rp)-j=ZvsXgPB2iWjuLbJo)Q*a$hXz zIKkK}bQwt$p8QtQT?s(=j0VdxK8hy?U8#sv6=X{P8|dFQ7tRk{cZv9c`&&=k>@MjN zzxDp+0q{3(p91Xjx_kh-@T>L`W4G8X+$F&o>XfcT8p7Fw}QP?Y!F56U885IB1-{_9E5r-3Zk4LsN?ErRvd~Idw-vIe z%2-jARMZOG)f@ihWLZXkv32iRU5=+qmy4>Wy12Yuw z`|ta1j}?h{*?hc7U10RCM^9owPvU`Rs2jTwi7ya^MFo@JdGhUdB+yE5d&rl7he))Q z%$5NWV@^6E2zAXIcQI~sqFM%1)NiE=XOK6&OFZH};^G<6A_0v#j2uHpHx+q_y~*M9`~N}_>J1j!2$I+{ur@!=hKSp0Kr2UO0QLRJi+N{sFR(+wMPc=d?9f{<4ba$I1E9OG*j>?cAk6WNvOj3*i~ zM^b03s-muF)WBU)_F4-Ol2%paNUA!Xh1H#%)$IZcF_#@lr+S-(wI6of5qn~XE~Z)a z1$T5Yom5?Hg;E10(Zx(MXg6JKgF-n7DClufl1#-)(rA9*e}VqiSU4px$P)bWRfTxV z{lJx@^9Sz71o+s!PL1E!dAL-iLfqm#qyP_j7@{gLMEzL-{_K5BfRDW`zT~mR-{u3b z&A&$h?xCyXE5!Z&9tGIrUrvG0<>FQq6x=G_B*2^E+JxkBZQ{0sTM_u30hKFQEj{SJQA@j;sNZ7L?>%@4@t$l}L?v9! 
z#avCmxsHpuj!1C@7o&oIpwa&ivRy&L^#l#-3HTQO8iWJl8Xp&LSBV|6ZGpvYyO=8v z_#XdC0jw+0ut?o1UZ>RSs5nww`^8qZ^N^R9f^}=JbD&iYK33n$3uiA?cdz>nRrekD zk7UIV>W`>6;2*fSV5mx5<87z#X1n(Qr5^D1P->6&7Ny?u-Xk*x&3oPl1pmOhf+~20 zkDpOlEVlZ%qr6Joj#^8s9sW*&@ARK0_|yK^sDfV;H&JzO62GC;Z^YG9-K!J-oj}ZB zTd=CGUCp5A0+|#JepGOUc-}=QwgS>!<>PS`q`(DIB*&!*cwfydWrmC3e++#QMO@eT z4^vnBDY_aB#jYPdq!1jpnnN#&-fSvxFX{t%ukrCa=v7Qfp%XWXjn@-*AlWGX#xW$p z9-m$uKYv!$fLRvP)+Dy5(tEMvm38uvB7$wbm%I&MX{RqIi4c~>>DoMnVjlBSDeSK1`~jRR3>5o!QY zBRN)4Q;)(-t6U|EB(o^V%uLeX7&0>rP@-hw`j3)KZIb@RVy4yr7R3oSE9yQF1(P40 z)O}qxZLnKmp_^e?deOzNg?{1iBPVzToj#7eSuncu29M%Wi&==>Yz9b*ih9in2ZSGf3+|l0BO&h36~dV#tD_umToX}dPeKe)q)HPyX@r<*Smy{)tul-z z+6-n7nus!c65@)u5ISju*lAej2=Q315PJ<~58W=xWFfkBJ9n;DW4zl%@SJX^GmM~< z42PUv#nR|@CLOj-ig!Dm!`)8h?XTM%5U%QW2ZYNOzb9^%hnl|e zR*qRpWd^$!Ypsre&mXZIJ#l%OiA1^+iO#b%2Z;wi7m@TuV9<9Zme5xc2?B%k1|&8R ziCK0C{%K0!-Ks+{5k2%6nsExp58O&!e33}q9(0MJeDTn+ z)bVrp@{DchIG(ZP=AnW^$H6Om9xiVSj4XLPVH-3`PS|qtv?PN@X-RUaWlaW+k~MKX z$Z7G0oP$P^XUpLWSq6=QEC_=&HqAaTS<^|Heb8S}5jW()x{1O%%aEmF8Gp+nnV|g{ zH1a$YHE%u>iTU+N)Ssz2Xrdq@;YL0Q?RpZCppoZikjM~8)W{E+5~4;vPB-$8OePw6 z3pMh1e#)k@aB{;O^!MpaMML3 z2h?<9O$j|T-2{_~rrV&J4o}&1R<2f9PSaT!v8J=&G%e<)!|a%w&dQ55-4r8B_olP* zv?MuAXJN>iP}5OOxaq9iSku|?sOdCRHJwi8G+mpK_kf!&A~~R@yUvu*L(|=8GSPHr zsHVeHD%r}_3d?Ca3nSKa7Mzw5xaqK(z)ff6#hUJBBTM(Dv+}egIZbC_$eK{oQBAn% ztlU`B+3=|8G*mU6PUbY-UL)@TH(f+>Kuwp|!>DK+W|7H6)0L~H!&55R%GC17Ok9CC>mZq^EuKv!>}3b9oZ#DkH){DW>AC#CX{MqwNSx!^yvMTBnbiw zzdLJ(Sa(NMjEw{&sagm5a(3Mjdt!&ZVa+N9?pR~eNh%D-FTQGN346mP8Fb3M;Y3k* zmW7gxCt0fr(*gw$p}@>_5sF4xe71nHqNvc$Tr36;Lth*=LIttRc#jFW$AR&ay#bu2>2b9MRX@%e>h^)BRk!PKFjH@QnJIvxLZ&ne}cQkfH{ zLS+}N#6K%fhe%7{Db!!ErCh!VXF9T{;|FV3h_889d5k8m@~#FzBRdv$F;r4~B~Ffr zDFc4xk4k0Of<@LuDMe#v`PwdAYb6#wV z@WY}Xhx(0so-E;cNsqRD)$77`uINfUM8_QfcqjpeD=db5#zPA+PAyo<8rbE;--A7^ z5+Izxv|T$G4ZK+V1p7T6tocP3Kj6GrT;yFrk8gSR(c>BKH}v?EcL8l{Z}D%V$IJdl z^teD=Py5+B#G~~1rQF?qUwlTXed03gZi_n;576Vu#INY_ZsHR>;xaiwL7Nt82!1+f zt9P@9$GskYHGCNk9lS@68|VbZGVwG2U3&b*-%6KR!}V!WC(-8c!80#BQ*oowh}?JZ zW5d3`AMT^m7~6e%p{=?1RNfeLyI7J>)a+BaeYjJGQ4%Av6CS)qij8Q2Xrm<4PE)xE zC}7?ucNp!+V)s&Ga~jJDBxtiHai*c|U-w>zO~hJU8K{j{CGmPPwP2m$cQ|bLSKhDC zDGWc_y@99Hyiz2W$HSdPH=Kq=BE-{B7Sh1j9ku5ou%bg%i$v*>C6Be;QM;oSDp3%b&?9ih~Nf`_F1YujSk93yn?x6E`>i=>UiK?W`x6yJlm$*eT&(9V{HMA&FG6k^gT{3yLuh=I#O zt}}(q<;s|}EF?;DO-xeN#H3Sxja`U=%R(Mfg%nAqQIdxTK3ORx-2cCU{uNcVn)0^m zZ{2dPc-y^1dE!gFS3?H98lIoWUR<1?_jrmykEfm}kf0|Dt}bNI)rIfLljh>R!cU7B z^l8z9#S-hm;i>?8BLVO`|0MyyOX8+b0&WUl4GAOs z2UQ&y|3K9NxVn&Q2>{g+098i-sEz=r8UjEy1VGgf0IDAVs&>^~vD$GI7wUF8I_Cd_ zRJ&|j;ncY`;u-gI7mqvq-98@w>~(!H9@iIt2U_2JZZ{(c4l~4apo=*tBsLZYhn^^Q zy06huzRUeUdZ~e*S~sE7XJlvY~Ic#v2v=z|nHjr?7z4&zjQEKTwRFPVZSc=K_(ujthxJY{TV z<#C^&OnQ;IJ?d9@TY33!pL@giB{MY6s$*CYd6Oviu1If+E? 
z_*#MKZlv!U#M6Z@@=?4Gi>~QK>BoEhmYy7~SaS9l%ylc`FLaC#5fK8i zO7W5Zx$w9Cag+F5=%QbPx&U}B_#FaL@w?!bJWLCA<~>i3=kv5+x1^A~CG~ttY;q3i z63?eTgfc<)Ve0jKkiDLNV<8Yoa6CwEJha%-PD${f-+C#(a6To!^qhAqd81w8uN=$< zcP5c~JoyJa*d#zP*=TH9FSOp(A{;g{95#A`GonlUIPqSh8}OT{U8!6k#x?w@Mz%E+ zY#;ws&6JN=#1izz!z)$J~=a)+I|H>C2;a;A(-t#F5QiPUDd0A1^9p#a(-;93p- zh%}glhp6V6f$@mO(xX)xRh1=14m&u_H03vZ@0c#}ssHCBJ--(2$fxJ8 z^WQ9>=e~mfF2qxxRm&Q(`e6I9T)V&GLrMMqMDiv0E{69&0hdh3cn}^w>`BO~zdT;-^!0pt%%w1#n6^139VPlzm~K4q;`kZ`VmjY#y~1x?{407FQk>b3%xE6sV;owiqvnt-&1N=@TUOlco^%Y=r)7Xz~D;XJxy#$>`LIV zEBJBJJuMQ}yakt=VwWK{{+at1nksDbZuju`gZI7{bU5cy@bSL)8G%0Y&iApb&Y&w3 z_t1wp_XK|n@c3KsaI#?Q-c3R2kgYFo3h>nkHR#MR{6YQf1}$aikIU3 zXYO7Kl=iwGxeWZsy^JCzE@=fydf0?Ydidn0RSS>2m3I(&{XrxAbHI1RGQ9tcra&vf zcMJK0E#3_t=iT7_kka=A&js)gYI7r!aT(T^S)$Wr(X_Z6o~foj4c5dhxYB6xc=Wy5 zOYzU+ROG4Ru3&dS{gD4Gyo1q@2M;Sa+qq-i z*+I>5gZqFRhg=x!4C0Vyf;|BiWX>Y|J;6HxO8hw3if`J{GT|#5q)5$JIE*DvrFP@L zK;sCfAkKO05t{E2v%$5ZInEPD-{L%ZK~QsRwtG&X)rm5&l38%DzrLbN9KQ%s(O>Wj zGtry@kM%S9zIk!?oV3hIa}LveWaxZt!F~y>v}CZh-dnw&!Ju<_@#^+awLBgx9mYwe z(>ilpF6j~|Q&vo_+c6qpMst9SzZG*OngsC6lj37cP5{}FxDT!YFI~aon4RF|&EO;X z@_6!TI15ay@I%<<&ti#b#Y%Ael&ttuFqYF5o?xs5J4`SzFQEy>N*TEK#A7x(*z6|E zmelm*N%|bC5>jE7QYo3JSxRM049zPhj#LIoB;QCV$UqWFKv!^AAm8o^?kB%-fAC*w zrtn|M>yrRnpS)86NOH#2MOn{ajG0@#N{jXn)57>-e3bTI{=u7^VH=$hn=qLla;Uq} z+2jmCcsS|sF|O0UPnYNtuhH836~VUyJid)i(x?60(spM^yJMs63I0O)e+j;k>@j~= z@>eu^TJ2&LX1fq)#f1H9kVq5D75I+F8VT(4|{+0RL)-^do1rk%q(Jn zYZKcOF~AkUO@X4mDcBXn^7aJ3(s{oM_QvyeB_B%K$k_FUgQg!!hd*VPs{Jb}6`W#J z?U2`Y4x<%_!w$n3bl7zKQd^gpzQphJy2O$eID^gbOn7|YUm_TIiMT`%{w3lOLA!^K zi0A1e&*#OvbhhAKaZiGv7bI^>(ypTX3$8LH_JR%e8lKodZcKdNe-6`nOXtBhgE)vp z8#D~)!zTM@!HL}OKgDYS-w<~R0x;{~B}0Idl_Kcncd8s@%dzAfI+J0rp~hVYnbli}z)W#Quk{LUSFt?;U|xmc%Aier*~ zh$R*J3p9vaF7~<$lQ75NP>s^c}j{q0@ms9+9IX>TI0C;CuqID3n)LI|F`D$1CJh9d3^i&$ZYQNR4Ma;}z=coi5(QFYAt{jn zuv#u^iW%_e%g(K-t=(2+yW#P|JEQWOS-*{z;H4hTzrp5a`7RuMnJ9|yq|xV2cNayf zyWIQK7xDMG_#MfW*th%^DfV05^=jwgdhdI}T^T;37WxOH6XgC`1&o9?N8V1s;O&w# zmeucbucCnQD&kzNIRnm|S6H0oARTeH_#Oo-KI&Hb`uSGLcQNtBQGzNQC16njV1`u| zL)n!z!{8mGhxt(U>W%nk0Y-rb!TYTO9#HT0u z6g*9tg-GbW@QNW>PN=W?uXacE(YzLKY#}jg7%V)=^(|8LZ&LgMV|!Vp@ri$0O0`!( zaI7B^C!WXM95kobgoot$1An2b?}d*mY(z`MPAC7Q*m=QE3_Ei2VgQQ4UYo`vGB+9O zGVv7`e6S6mrF=ULd_Nc81`xiT20jjz$NAd;!ne~@4L2O24g>U&=g2I8Iztvi5~)D2 zo7Q9qFEN5dTu^6dQ0L|&7e(qMf79rON5_f;wE^fNZJ0Pi`^X}7QWdF_s>sINA`QSQ(!fM2PZp^msz{ww zMe3v~QYTfBn{taZ0INs?6RDIeQbSacI;o1(NmZmya*$skpp)us z!>7vIsF%P3i_qWXU*OWZIR4bp z&0+58p{`MXjN&}exZTjx?Ru94Td){JBZr35#bqdWyFx?t{#7;-%kWO9OQ!ELX=gL| zi(xg(4-t^QmG~?{FWqusHI#>G5IF{mZU6-_ltm#;3t1Vt{{+7=m{Xv^DaZO7od75N5VBN=kKBuBUG;MBR(kC49;e4s-rMwe z-@AxD#=P9eItKFmXYe4~aO!0dQ&RrJTjVR-%vc3adDAG3ifag6$y7}@7H$154pM-H z&dB2x9surDv;%1-Y^PKHckD7W-T+PbtRn2A2u(WW-(nMLI1b-nO$jBuqX-XDgeINx zAG8TIe6S`A8f2X{p-HFwoi?F{8^X~!geINx@3RRt+z`&oAvEcf|By|n;fAmwhtQ-` z{#`bqh8x0b6=83s9FtD@x7&moj?)#;DhJ}rm0pE2O?DEjo^vP3%_VcEm33TdMz#{s z_2*84EHeGvyod^&Vx1~$pz0(h4W{}p=-((`g@Y@XP8XMlzXUAw$2z^n3x7Ydzzlc|YLY;+^HJjO{Er2WL9+-l^A+r_l>8JY5-Io6yenxr z?8>}rIeBB=&6K=3?`}@skMk8sK9u(uCx4#z93`L2dxewt(Y4EY3 z!X+Ofe>4Jh9t9#eHjYHYSLGfRdY33m<;WLPgZu7Fe8nI zbfYyHOba_3u40CH@%o1=6+GgpA>DYI49)XCMa%9IWtg<>XDzsNm+0_M+JtsGXV}wt zhVb|vX}BRgDu>Xd zbA~+)H-w|qG)9YR(mA7-hD$dSm<1IKmwTzq!6k&nveW6LG{AUayj9J6;u{sYvjmq-o=XAX73Lb@x z8qzIn)EP2t1kVMwiP-6!j;HaAi8R`TkkzDfI-Z6b!t-(nO**IJX}BT0K8Mhxb2^@e z8^Q;22u(Vt<7v1dd@hI3q;oo+h8x0na|lg3r{ih3A)J%b@k~0W<7v1gGzUSvV2d&S za@l!qcVAUq>l(UMmOIo(!4L%g!MXnk{)Oi`;>XD+xNqckNt}Of6K9@+^`ZDw_-oGT65mfePM>RmUxmUum>Yvd z4VD|)q%J;*ScK4ZR=zIY0;m`@*m8nYxp4SH(ONzAx8e9Ot`-qq(haXx*kvhyqH#BI 
z_^>^aAvy8z2MWKBjgRTQfn$%F(SzdSwH*9O<|3?yEXnoN!R~W4(t0crt;a>MRk#TD zGquz6nYWAfM|a_BRi+jX`?t~OfBzaQsc21Le}6I7)nff8^pWQ$k~NlsKBHepsK&1& zeBTHB`?3VUANK#3<~9E-E}|nz7bPwZkh(b7N()R|1C)NcRXUyH>G{tp9p4a&akv0? zB`(1I%u8QoU2oC5!QS&UNil`0#RK^8 z)PTXL!r&2D7#T5{0pE9@a4G*ey3leZRzH5@`sdJEgysY;xD!+JdI6sp+;OgW!QF-1 z-M@dX*yTNo11o?$EB=fZK>mz_^=SXk#gA}+<@@K#4Rk~a#ySxGVx{LV)FyvPT!>cS zRU(vPqu@I2#6B9O_a!a}yiLWj{Mf(2XNKi**tS1zHza}}yXizuB_^7WO2Hnbvjvs- z=V{1;{HGJ|(;1fc6My7%q;ksT^IleUdqX9=ydR^*>4QY02DxfyLx2!=L$Dz{2@AiX z8UM4$$EIto;6Hkp5pm_ahW^G@pu%$a!6XvS_s+HxHzW3hN3pRgpu~ zx`IiADf`=7jZ8SuAYP)I!g0(fFVt|g;AumE5OhPZAwZzG%<2TxEY&rDBeKz5126{) zrNi_#YB!Y@MJg@oZK)Jr`utoAuU2I(2$*BpJqN*t0D*c;@HfqaR(V!-i8s9S@Fnd^ z>QM1qv|tREhM6WNFl=%rgOq@g^}!EBu%qT}z8D2wWln-@``4&PB_X^S#xFx~%O zP_a>F3a1neb>cua$oBS_-As&BQJ7kKtq=+~Q;Y z#k5(1VTpyPh7db&o)0-Yf(K|sdH|(TIx3XiXH=+F5?}7HjmDvECcnkpJab{vA&P-gRXs_|!$)%N~L5rdL2aP$OyH5vh+A72hJWgT98=w-nkb&gs%{3%)CfDqYN3px-r;xIxLhaGD`@VQb=!KamWP>sM>`M7Yx4{_*umv{<80=~^)?_=en zWhf7=!TvtVVedm(#`{=#5fwxmuF5)?Fzqxou?T-*L^U6>{oP1+Iy82h{|KGqegs$M z+ubX0O!Ezd>IW|6PI9CF1KMO^S7NcD$mKRLpk;{wiVgOY$BV8JqWXHmQ5=0#L!(M_r>?VC*+OzAIZo6AqKdbrO} zemgD$few>J9v5qt)|4UGY%=+Rk~2{18LK3Q>Wmi}j9wt`wE`F`Vi&~(9~cbgF&7wf zPm1yQ*kCY^nZTGi&z6JhP<*nk1K+Q*23Tt@QpeUVa=YNl!YVZk#IGm`B1%4(b`FgB z9HxYmXUY{8CAzQrv*>ULj4qN|BF1!4iHo};0#4HCe1nn?@v`Q?&){ey_$V0PsHO1e zOHac=L(jphBym$%rS|n)LO!;T}^W1OA#Ea0I#T4 z%quDDR!h8UBPp#CgYDnh0Ue4ZHR?@%nHTXX@}_bPapy#kt2a}36pg>&1XGidCZ<>^e)_H-=R%t{szt&Sh@5dGxUu3|79%^f=~-erRbmrvIc zE;=0Mw73F^aIG7$EL`qtgo{uKS8^>D!w7e=8X4!B^63DA=sz0&$}!W0Xy z1y&ve?a!ASjRse1Z6w%kIDdJ~BHS;pS%mw=HM_->pIb9KC%0x+-hOM=W;lO&%_7_{ zuUUlq#Wm|N<>%JS&dIHrmAAi|!TM*OsO@n9uom2|#)I2koIYL>%v0lm8uq&V?50J| zR@sCxrHOgKP?<+nsCh(VO7LA%C=dBlK{f3d8uy)0XC1?K>4Z-x)I%tZxKZaPFtsyF zf0)r~_%SpERLA?$rkD}*^K6&8m+~(;8iDBNJ^QvSScEV9!@3kDkA;7#e0VD)dXT}^M*ZT_>N*x5Xll`)Js=_Kg*(jEg3 zR579+fMyvm7vt{Sa=eujHH-|@D=caqe8lJ$ivfgjZAg*3cvK{nP4!k~cqLJF7LM`Sj#6JtIU5sF6 znG{ob^j5AigRCr$@@J~=Kq)i0(vWd$;M}V?h(F?mpOg#2@DEU* z3O~}PVbD#VR!%p4MtMEyla*6paK7A(BHRP3c7%Um)xKQGVyazjgv+hAos(N_D{sHm z{=hiRFRylldtlX$@DHrohv!!NV5aJkjCb8@R~ZhI7H?#;8@KggpXhI4S0G!;s(*K zHSA9Szun%;#P(+K5ZLyB;?2abL<5JOgoI}gSg0K{f{P|bs19Pq;8s)8S1_slhWIX> zgoOKkR>6yLE|bP|OC(tfy%!j%XB;Zf>KsxG)1&`u`3c0M(vvy;HaFrt%$Z6z7oP6HeJ zh&HyBWNbSvd3UpckBs^Y+*P7rq}PUeJE6wobNV9GSdP02u~_IUg*$GovdQ2O(+2%v zDZ6}Z*~bk~+HMRFr>h9B!Wb%sLek!?iWmJRCPEIKgprbHCV5DkQ!*xS`h>$KmN=98 zmy`}3ljoF<>VK%)f8t;F_2GuNPqCsOTBptvJZ$95*T*XId&y%JItzCfk`yNCwGX5Z z-*mYS#6X)~cO2N^kH%|SaSAj*I7J9Vih6tPQ}rCg$~t7t7Yi+l`VMcOdbRk%nu)$6 z4#~P-h(a~EONmIojERp0wGzzQWUzfQ1hAqisTe3?lKulJWW95#M5M=~{3Kci%kMjRfjn$_6!Xa%6->}~Q$ zIF7S3sm>}QBL=H>g*qD5;{LGFqRlx9ud_f~Ote;-d1X{rIz;noF?}oUhe4~e*bm3k z1K9$Coiq7~zJ#-lB7X?mRk(pBcQ?>(X(j%7i&Afi z^AdjBSzUNMO{a;TPW*-rEBz*MN8q=u$JKbj8+5$!jR3c)Rf_A9cab^XmAozV+ty;o zGQ27e;H&bUrdwH`&O1NFMCYe2#03rF!ql}XFxQy--xUAsu0^~kk07X|o(VrF~^=aY0LO**huFxyI5UxaASoA19kP(j- zJx{6Ui=ODk;ylsonO;ag(+jPm)>NHf!!p?h!oej5^g-rkd~S@}UC+Y;E;}7$%VqF* zTXD*wuE8)`0~WZL9v2uP@lkMj5~<6RJLz+fTf>LwGml+)KcX)-ew4Q(g}fc92UAEr zm_nJ$Q07khd}C++4=DA6{7)!#Yr)<0)#BX+|Dr3R{#*#L7Arx5$I0&TNB0&iraMq7 zMP5T;hf{FVr~gwtbml1fSytKag4YacDzV0yjNdCy6(zF#QnX-#O;WHAe}j)4+7oQ@ z$PpmrG6S)apD3Iv8ywcw6G2ZfW>WCa{g?8kM3xwEh4<)wkNYMT3mkgIuVXafrj~+- zt_+-^911|v;134PXrLs~!5S!;GFqeS+UKTCk>NY4|BsSkm`)y{KO)rGxCr#bQKcjO z^;b(h;UiHV!ZZQSyulVzN5B#hpB<{Ku$b4cNnQ;>4DGk!wwSQkZ`;K2g|4tFlGxiy zC|QHcR^lZ?Dp~XiG)b-4zkh1EMaL)^5O)6QnCdX-6@_9gk9c#W>RNR{*@ zWp{&@j5|`x@P)e1C1k_48nr(wssA|gZxj)gsB7ow+9e1PzM>R7O*UIfSwm2;#-VbZ z!C@WSia{Kr8dMVw>9Ij0blHxY?@+CqKts?^23uvZZCLsVQqUV@$brjhMm&nRnngpk 
zE!3!b^itBAbgzxLM7Nvq>1@@Y+6@ND(_}*lX9oJvjE7w6KVLiMoh8tT{0piK3io&X z7?DUd57THJ4qQeop?Zw+KWloRUbHYq5KAz1cc%NniI;U%ye$6VmeTLwE;L!A?p*(a z!n!kwvHwpws){I;Pt`W;915pBb5xf&6kbP+`nsEs(v*`o4YbB!7bzECRH9sb$>MPl zfz{0v3k|VK53V=dq;7)?{}GS1y-WdfBLG z!M$6ccb~z zN5`9x+Qs0@X3`$VZYB*=^=Bq_{Zl2KinRqz4V?%busiGCZ)L;eFrpDeyDTnV?bB)4 zzghgsFDbseR51b;k#Y7btoC^$N!3bN4cnivMLp@NXyilx|M8U4h@zIJX1h;$|By5x z{{ms6-zxl?$35a(2|V5mJ_%ekd^*8&gA&EkhZJrwZdS(9VTD3VN*4?M;aLg2Sl}Y#HsffWAr73n7T-P&Brw2`FXjv`9Y6$Vw&{C;U z=tM(+*Sjq@%m!WW&Rk%O50nkct-3H`Yo~dw3^DnQN5Otrj})z^ zhS9L4A^RP+&h4pyo?u+49=du$=m{1h>RD{)aj=@v8heBuk214dHMzvikIi}Ot;uDm zQAS%$tzq~Y(=>H75v1uDhN;|1TJh+CnsII(X;t$a4GV+8ovkF}c?w0@frj9MN{%Cn zhv>=BHx;h!6YnuKo2r7#shAsyX(#1F2rIS>|D+HLl*@xCnpV$|Oqwd26PuCfCSP_A z!%=E1DxpVHB?g9qeGOaq?YjFq$3pC5E2vSd8B6f7$yf80WpXanH%;W^W8(0sad?ru z8eEs6YiiO|(;h+?lP_zkg&3x7!xqc1upD=gDp}txfFWn8#b!>tys07k8{YTFNmrEZ zl{sn1{x}uwN2XJi%9L3#uOrPe%RzoPzDqoo_cnf^czjpbrIv+@C9# z*fh|WEJ~C>G(DC8%)Lt`4PBtNMD$(?^rffru^Mq|mY0r0jdFxK*&Y;{HROI6=f)qa zHv$G3I4u$mFO?C6#xC$S($TUomc|yNP7Cw3rhyOzKGQI1iD5$sgV&`FHf2))gkPHG zZ8s;d;kdPbJY?;Lk6)&q5L+czYQ!VhvPA2LFYjH9(NF|-pjzs$=fH!Sc zX9+gD+v$|RcJ~h2JGsMs9I1$7F4Jn!cRrLppZMWO)+rm2*vPDq+Dyc1^XOh9Ngr?H zK+)$&xpVrW0%PHjYX+G$h<11ITfltZul$WaV1%D3f|N+@!~O_sL&NC8iY2(`3<5KJ zlCci{aO6;ihbJj$o~l={)10pw2=ajwSv9C=_>9UZL35xlL`m8@3ZGQD{n03hV05nf z?*A~=<4S_h3jg5XoF-(qU>?IrufbkZJbz+xlqvpWouMq0u;pJcY@U2kn^qIB>^flq zw6#DUHxrBQr8d0R{hn&n@6kOb@n%HH5f{EUn~nlcB?f4^MHkb)>%};PE7On|iS%w!^GkK04=w-&EMWC=GN^-3u_3oi(J<06MFBn&>kSKbNqEmXO(3 zMVTTv%jpDMh;jIJ3wC=^X0Koa6Nz4PFuM?mgw57DD9g)@`v#w_o+FqQ#(Uf*=!ir1 zpFQ*Bk-UT)?p3v@4?f%bOVz9v>_vvjwvqn^AE-mS!WjhX8dMFnRxjl?7E3Q8C12!T zqFU(^_nXvT=r71Gm-ND+(~C{$S7d@3D)&*tAqt6h?O7A!3>AF2Sg=xY1HUCEhrhHK z?rmKrhL7;pogur*X(oFx2nP>g0i^^a&Gv;_8vc$5#(~dV-79F9>k9W4Wm>#ZW1;d% z#?AD-|7M^{J7R2(Y0yK1pBT2Z|LCWIdgcZ6EjRw+`AO(5nMSFU&xTC-&l`H$E)xqO zx^(-L`I5;jN9K|h>^sBXD(KSrec~L1Y%u7$|SdbSP)~j0C<5Idq_yQ3g=i27or4 zBSGYU=4)r`wT~fdI#Z>Ue~Xgr-`Z4eA-o^@Yn0?OEm4wvW0WLX{TG~JJn#rqYy=y> z9-r|ITzJh`i5i6sGHns^k8;y5=`15cxTaGZP`8FP_@XJIR#CDlH%5)SH0bwzI4r=? z-&4Pe0rsnFu!9{@n@>kGMqD~CzL<9vSM#VroP#KvyYg_f%;MFuV_v2xY3CW5jgowO zfKG~J`xv-2xDpH)8b~CuB>iHF$&E-5Y}h2RBwrk%b0ZS^Kn`+WXR1X9uBO9MVwLX! ztBSDp-#p;CS%shUbE^1s5=WcxT@>EN9WL$m@Dl(zVG$sUu)Q4nSScdqlNCgOWYkgY9;z#)X(JKA%8#!X=XdsU!N;K)=#{+UaQ7*xUOA(bVB390xSAvYY8+ zmtkcFU=#3{Lxcfd^3TFZp-!n1pp_5?KEP5Taa2Gz9z1$Q`L+Pvh`X^DMWH5jupE<& zCsXlczLiXLI;Eq#oPzFE2xJy2iv3sMh$3A7jrs(rF_G{vvco8>0GS1&q{6r=Q5Hv( zlm$(xszjMXps6A3%m{-&RSTceT;U7pFW93281a{gK2hZJmdd~Z35i*J20zg;axh2g z42@Z#0lAoUIs=&Sg>5Q1K#K=?wrG@G+FNvnD#cNM!5*FAOhpZ*;%gdl5-yBHg~j4? zkvR7}HMF1S{x=Qn+#j8wutqY#-2p%_vkO!Y&h^Y4%gyvp19WA zuCBq@?)@IecEY=Q^z9aHDQv40Kd7++Kd8a9@B&P`!lKO$TFLd8(vO5cIPxbHVf@5p z4)6xsz1?)}#BT34`rsdc+wc?I0Q}DXy#z$eIiK1D{}1jmTm|G3Lm}36{sTB(#Ge_* z5QZY2xX!?>2*is(o*ab#( zEDU8)5xIF06$)Ri%F;LGm^AmiaJ$OZH|3ag%D-ug)HP2N4IE@oq2D2HE?N+r@WrXC8ii=rZ3)d}4A+ME^lz zIAU2FH2mavdh%p({2V8tkJ_Sv=sNto!Z;|ACklHz34M5$v4Cf%1xW04?|$)gzd=r7 z!3Nkm^9+~JD9yDy{f;41-JClSU8eEdI-0F_1vd5qvB^l^VuW3s?W zSO@xX(*I^i{!=4KC<+Qh353@-SE>I3%uZIx_0*3x5O0j&UIO9x_y#)NC z4<~VpTW@?+=_K{FCtYsRTrlD!`kHGnpnPBS=_dxkA6Qpvi1b>6?>I?)al}Xigj#nh zCc8!Q45REsVt&;6{YHqp@ngpL(S=ui3nw{cfy5bP0TZPb;j`Ye=pCn6kUoRO@QGq! 
literal 0
HcmV?d00001

diff --git a/vendor/bundle/ruby/3.2.0/gems/json-2.13.2/lib/json/generic_object.rb b/vendor/bundle/ruby/3.2.0/gems/json-2.13.2/lib/json/generic_object.rb
new file mode 100644
index 000000000..ec5aa9dcb
--- /dev/null
+++ b/vendor/bundle/ruby/3.2.0/gems/json-2.13.2/lib/json/generic_object.rb
@@ -0,0 +1,75 @@
+# frozen_string_literal: true
+begin
+  require 'ostruct'
+rescue LoadError
+  warn "JSON::GenericObject requires 'ostruct'. Please install it with `gem install ostruct`."
+end
+
+module JSON
+  class GenericObject < OpenStruct
+    class << self
+      alias [] new
+
+      def json_creatable?
+        @json_creatable
+      end
+
+      attr_writer :json_creatable
+
+      def json_create(data)
+        data = data.dup
+        data.delete JSON.create_id
+        self[data]
+      end
+
+      def from_hash(object)
+        case
+        when object.respond_to?(:to_hash)
+          result = new
+          object.to_hash.each do |key, value|
+            result[key] = from_hash(value)
+          end
+          result
+        when object.respond_to?(:to_ary)
+          object.to_ary.map { |a| from_hash(a) }
+        else
+          object
+        end
+      end
+
+      def load(source, proc = nil, opts = {})
+        result = ::JSON.load(source, proc, opts.merge(:object_class => self))
+        result.nil? ?
new : result + end + + def dump(obj, *args) + ::JSON.dump(obj, *args) + end + end + self.json_creatable = false + + def to_hash + table + end + + def [](name) + __send__(name) + end unless method_defined?(:[]) + + def []=(name, value) + __send__("#{name}=", value) + end unless method_defined?(:[]=) + + def |(other) + self.class[other.to_hash.merge(to_hash)] + end + + def as_json(*) + { JSON.create_id => self.class.name }.merge to_hash + end + + def to_json(*a) + as_json.to_json(*a) + end + end if defined?(::OpenStruct) +end diff --git a/vendor/bundle/ruby/3.2.0/gems/json-2.13.2/lib/json/truffle_ruby/generator.rb b/vendor/bundle/ruby/3.2.0/gems/json-2.13.2/lib/json/truffle_ruby/generator.rb new file mode 100644 index 000000000..c814106de --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/json-2.13.2/lib/json/truffle_ruby/generator.rb @@ -0,0 +1,690 @@ +# frozen_string_literal: true +module JSON + module TruffleRuby + module Generator + MAP = { + "\x0" => '\u0000', + "\x1" => '\u0001', + "\x2" => '\u0002', + "\x3" => '\u0003', + "\x4" => '\u0004', + "\x5" => '\u0005', + "\x6" => '\u0006', + "\x7" => '\u0007', + "\b" => '\b', + "\t" => '\t', + "\n" => '\n', + "\xb" => '\u000b', + "\f" => '\f', + "\r" => '\r', + "\xe" => '\u000e', + "\xf" => '\u000f', + "\x10" => '\u0010', + "\x11" => '\u0011', + "\x12" => '\u0012', + "\x13" => '\u0013', + "\x14" => '\u0014', + "\x15" => '\u0015', + "\x16" => '\u0016', + "\x17" => '\u0017', + "\x18" => '\u0018', + "\x19" => '\u0019', + "\x1a" => '\u001a', + "\x1b" => '\u001b', + "\x1c" => '\u001c', + "\x1d" => '\u001d', + "\x1e" => '\u001e', + "\x1f" => '\u001f', + '"' => '\"', + '\\' => '\\\\', + }.freeze # :nodoc: + + SCRIPT_SAFE_MAP = MAP.merge( + '/' => '\\/', + "\u2028" => '\u2028', + "\u2029" => '\u2029', + ).freeze + + SCRIPT_SAFE_ESCAPE_PATTERN = /[\/"\\\x0-\x1f\u2028-\u2029]/ + + # Convert a UTF8 encoded Ruby string _string_ to a JSON string, encoded with + # UTF16 big endian characters as \u????, and return it. + def self.utf8_to_json(string, script_safe = false) # :nodoc: + if script_safe + if SCRIPT_SAFE_ESCAPE_PATTERN.match?(string) + string.gsub(SCRIPT_SAFE_ESCAPE_PATTERN, SCRIPT_SAFE_MAP) + else + string + end + else + if /["\\\x0-\x1f]/.match?(string) + string.gsub(/["\\\x0-\x1f]/, MAP) + else + string + end + end + end + + def self.utf8_to_json_ascii(original_string, script_safe = false) # :nodoc: + string = original_string.b + map = script_safe ? SCRIPT_SAFE_MAP : MAP + string.gsub!(/[\/"\\\x0-\x1f]/n) { map[$&] || $& } + string.gsub!(/( + (?: + [\xc2-\xdf][\x80-\xbf] | + [\xe0-\xef][\x80-\xbf]{2} | + [\xf0-\xf4][\x80-\xbf]{3} + )+ | + [\x80-\xc1\xf5-\xff] # invalid + )/nx) { |c| + c.size == 1 and raise GeneratorError.new("invalid utf8 byte: '#{c}'", original_string) + s = c.encode(::Encoding::UTF_16BE, ::Encoding::UTF_8).unpack('H*')[0] + s.force_encoding(::Encoding::BINARY) + s.gsub!(/.{4}/n, '\\\\u\&') + s.force_encoding(::Encoding::UTF_8) + } + string.force_encoding(::Encoding::UTF_8) + string + rescue => e + raise GeneratorError.new(e.message, original_string) + end + + def self.valid_utf8?(string) + encoding = string.encoding + (encoding == Encoding::UTF_8 || encoding == Encoding::ASCII) && + string.valid_encoding? + end + + # This class is used to create State instances, that are use to hold data + # while generating a JSON text from a Ruby data structure. 
+ class State + def self.generate(obj, opts = nil, io = nil) + new(opts).generate(obj, io) + end + + # Creates a State object from _opts_, which ought to be Hash to create + # a new State instance configured by _opts_, something else to create + # an unconfigured instance. If _opts_ is a State object, it is just + # returned. + def self.from_state(opts) + if opts + case + when self === opts + return opts + when opts.respond_to?(:to_hash) + return new(opts.to_hash) + when opts.respond_to?(:to_h) + return new(opts.to_h) + end + end + new + end + + # Instantiates a new State object, configured by _opts_. + # + # _opts_ can have the following keys: + # + # * *indent*: a string used to indent levels (default: ''), + # * *space*: a string that is put after, a : or , delimiter (default: ''), + # * *space_before*: a string that is put before a : pair delimiter (default: ''), + # * *object_nl*: a string that is put at the end of a JSON object (default: ''), + # * *array_nl*: a string that is put at the end of a JSON array (default: ''), + # * *script_safe*: true if U+2028, U+2029 and forward slash (/) should be escaped + # as to make the JSON object safe to interpolate in a script tag (default: false). + # * *check_circular*: is deprecated now, use the :max_nesting option instead, + # * *max_nesting*: sets the maximum level of data structure nesting in + # the generated JSON, max_nesting = 0 if no maximum should be checked. + # * *allow_nan*: true if NaN, Infinity, and -Infinity should be + # generated, otherwise an exception is thrown, if these values are + # encountered. This options defaults to false. + def initialize(opts = nil) + @indent = '' + @space = '' + @space_before = '' + @object_nl = '' + @array_nl = '' + @allow_nan = false + @ascii_only = false + @as_json = false + @depth = 0 + @buffer_initial_length = 1024 + @script_safe = false + @strict = false + @max_nesting = 100 + configure(opts) if opts + end + + # This string is used to indent levels in the JSON text. + attr_accessor :indent + + # This string is used to insert a space between the tokens in a JSON + # string. + attr_accessor :space + + # This string is used to insert a space before the ':' in JSON objects. + attr_accessor :space_before + + # This string is put at the end of a line that holds a JSON object (or + # Hash). + attr_accessor :object_nl + + # This string is put at the end of a line that holds a JSON array. + attr_accessor :array_nl + + # This proc converts unsupported types into native JSON types. + attr_accessor :as_json + + # This integer returns the maximum level of data structure nesting in + # the generated JSON, max_nesting = 0 if no maximum is checked. + attr_accessor :max_nesting + + # If this attribute is set to true, forward slashes will be escaped in + # all json strings. + attr_accessor :script_safe + + # If this attribute is set to true, attempting to serialize types not + # supported by the JSON spec will raise a JSON::GeneratorError + attr_accessor :strict + + # :stopdoc: + attr_reader :buffer_initial_length + + def buffer_initial_length=(length) + if length > 0 + @buffer_initial_length = length + end + end + # :startdoc: + + # This integer returns the current depth data structure nesting in the + # generated JSON. + attr_accessor :depth + + def check_max_nesting # :nodoc: + return if @max_nesting.zero? 
+ current_nesting = depth + 1 + current_nesting > @max_nesting and + raise NestingError, "nesting of #{current_nesting} is too deep" + end + + # Returns true, if circular data structures are checked, + # otherwise returns false. + def check_circular? + !@max_nesting.zero? + end + + # Returns true if NaN, Infinity, and -Infinity should be considered as + # valid JSON and output. + def allow_nan? + @allow_nan + end + + # Returns true, if only ASCII characters should be generated. Otherwise + # returns false. + def ascii_only? + @ascii_only + end + + # Returns true, if forward slashes are escaped. Otherwise returns false. + def script_safe? + @script_safe + end + + # Returns true, if strict mode is enabled. Otherwise returns false. + # Strict mode only allow serializing JSON native types: Hash, Array, + # String, Integer, Float, true, false and nil. + def strict? + @strict + end + + # Configure this State instance with the Hash _opts_, and return + # itself. + def configure(opts) + if opts.respond_to?(:to_hash) + opts = opts.to_hash + elsif opts.respond_to?(:to_h) + opts = opts.to_h + else + raise TypeError, "can't convert #{opts.class} into Hash" + end + opts.each do |key, value| + instance_variable_set "@#{key}", value + end + + # NOTE: If adding new instance variables here, check whether #generate should check them for #generate_json + @indent = opts[:indent] || '' if opts.key?(:indent) + @space = opts[:space] || '' if opts.key?(:space) + @space_before = opts[:space_before] || '' if opts.key?(:space_before) + @object_nl = opts[:object_nl] || '' if opts.key?(:object_nl) + @array_nl = opts[:array_nl] || '' if opts.key?(:array_nl) + @allow_nan = !!opts[:allow_nan] if opts.key?(:allow_nan) + @as_json = opts[:as_json].to_proc if opts[:as_json] + @ascii_only = opts[:ascii_only] if opts.key?(:ascii_only) + @depth = opts[:depth] || 0 + @buffer_initial_length ||= opts[:buffer_initial_length] + + @script_safe = if opts.key?(:script_safe) + !!opts[:script_safe] + elsif opts.key?(:escape_slash) + !!opts[:escape_slash] + else + false + end + + @strict = !!opts[:strict] if opts.key?(:strict) + + if !opts.key?(:max_nesting) # defaults to 100 + @max_nesting = 100 + elsif opts[:max_nesting] + @max_nesting = opts[:max_nesting] + else + @max_nesting = 0 + end + self + end + alias merge configure + + # Returns the configuration instance variables as a hash, that can be + # passed to the configure method. + def to_h + result = {} + instance_variables.each do |iv| + iv = iv.to_s[1..-1] + result[iv.to_sym] = self[iv] + end + result + end + + alias to_hash to_h + + # Generates a valid JSON document from object +obj+ and + # returns the result. If no valid JSON document can be + # created this method raises a + # GeneratorError exception. + def generate(obj, anIO = nil) + if @indent.empty? and @space.empty? and @space_before.empty? and @object_nl.empty? and @array_nl.empty? 
and + !@ascii_only and !@script_safe and @max_nesting == 0 and (!@strict || Symbol === obj) + result = generate_json(obj, ''.dup) + else + result = obj.to_json(self) + end + JSON::TruffleRuby::Generator.valid_utf8?(result) or raise GeneratorError.new( + "source sequence #{result.inspect} is illegal/malformed utf-8", + obj + ) + if anIO + anIO.write(result) + anIO + else + result + end + end + + def generate_new(obj, anIO = nil) # :nodoc: + dup.generate(obj, anIO) + end + + # Handles @allow_nan, @buffer_initial_length, other ivars must be the default value (see above) + private def generate_json(obj, buf) + case obj + when Hash + buf << '{' + first = true + obj.each_pair do |k,v| + buf << ',' unless first + + key_str = k.to_s + if key_str.class == String + fast_serialize_string(key_str, buf) + elsif key_str.is_a?(String) + generate_json(key_str, buf) + else + raise TypeError, "#{k.class}#to_s returns an instance of #{key_str.class}, expected a String" + end + + buf << ':' + generate_json(v, buf) + first = false + end + buf << '}' + when Array + buf << '[' + first = true + obj.each do |e| + buf << ',' unless first + generate_json(e, buf) + first = false + end + buf << ']' + when String + if obj.class == String + fast_serialize_string(obj, buf) + else + buf << obj.to_json(self) + end + when Integer + buf << obj.to_s + when Symbol + if @strict + fast_serialize_string(obj.name, buf) + else + buf << obj.to_json(self) + end + else + # Note: Float is handled this way since Float#to_s is slow anyway + buf << obj.to_json(self) + end + end + + # Assumes !@ascii_only, !@script_safe + private def fast_serialize_string(string, buf) # :nodoc: + buf << '"' + unless string.encoding == ::Encoding::UTF_8 + begin + string = string.encode(::Encoding::UTF_8) + rescue Encoding::UndefinedConversionError => error + raise GeneratorError.new(error.message, string) + end + end + raise GeneratorError.new("source sequence is illegal/malformed utf-8", string) unless string.valid_encoding? + + if /["\\\x0-\x1f]/.match?(string) + buf << string.gsub(/["\\\x0-\x1f]/, MAP) + else + buf << string + end + buf << '"' + end + + # Return the value returned by method +name+. + def [](name) + if respond_to?(name) + __send__(name) + else + instance_variable_get("@#{name}") if + instance_variables.include?("@#{name}".to_sym) # avoid warning + end + end + + def []=(name, value) + if respond_to?(name_writer = "#{name}=") + __send__ name_writer, value + else + instance_variable_set "@#{name}", value + end + end + end + + module GeneratorMethods + module Object + # Converts this object to a string (calling #to_s), converts + # it to a JSON string, and returns the result. This is a fallback, if no + # special method #to_json was defined for some object. + def to_json(state = nil, *) + state = State.from_state(state) if state + if state&.strict? + value = self + if state.strict? 
&& !(false == value || true == value || nil == value || String === value || Array === value || Hash === value || Integer === value || Float === value || Fragment === value) + if state.as_json + value = state.as_json.call(value) + unless false == value || true == value || nil == value || String === value || Array === value || Hash === value || Integer === value || Float === value || Fragment === value + raise GeneratorError.new("#{value.class} returned by #{state.as_json} not allowed in JSON", value) + end + value.to_json(state) + else + raise GeneratorError.new("#{value.class} not allowed in JSON", value) + end + end + else + to_s.to_json + end + end + end + + module Hash + # Returns a JSON string containing a JSON object, that is unparsed from + # this Hash instance. + # _state_ is a JSON::State object, that can also be used to configure the + # produced JSON string output further. + # _depth_ is used to find out nesting depth, to indent accordingly. + def to_json(state = nil, *) + state = State.from_state(state) + state.check_max_nesting + json_transform(state) + end + + private + + def json_shift(state) + state.object_nl.empty? or return '' + state.indent * state.depth + end + + def json_transform(state) + depth = state.depth += 1 + + if empty? + state.depth -= 1 + return '{}' + end + + delim = ",#{state.object_nl}" + result = +"{#{state.object_nl}" + first = true + indent = !state.object_nl.empty? + each { |key, value| + result << delim unless first + result << state.indent * depth if indent + + key_str = key.to_s + if key_str.is_a?(String) + key_json = key_str.to_json(state) + else + raise TypeError, "#{key.class}#to_s returns an instance of #{key_str.class}, expected a String" + end + + result = +"#{result}#{key_json}#{state.space_before}:#{state.space}" + if state.strict? && !(false == value || true == value || nil == value || String === value || Array === value || Hash === value || Integer === value || Float === value || Fragment === value) + if state.as_json + value = state.as_json.call(value) + unless false == value || true == value || nil == value || String === value || Array === value || Hash === value || Integer === value || Float === value || Fragment === value + raise GeneratorError.new("#{value.class} returned by #{state.as_json} not allowed in JSON", value) + end + result << value.to_json(state) + else + raise GeneratorError.new("#{value.class} not allowed in JSON", value) + end + elsif value.respond_to?(:to_json) + result << value.to_json(state) + else + result << %{"#{String(value)}"} + end + first = false + } + depth = state.depth -= 1 + unless first + result << state.object_nl + result << state.indent * depth if indent + end + result << '}' + result + end + end + + module Array + # Returns a JSON string containing a JSON array, that is unparsed from + # this Array instance. + # _state_ is a JSON::State object, that can also be used to configure the + # produced JSON string output further. + def to_json(state = nil, *) + state = State.from_state(state) + state.check_max_nesting + json_transform(state) + end + + private + + def json_transform(state) + depth = state.depth += 1 + + if empty? + state.depth -= 1 + return '[]' + end + + result = '['.dup + if state.array_nl.empty? + delim = "," + else + result << state.array_nl + delim = ",#{state.array_nl}" + end + + first = true + indent = !state.array_nl.empty? + each { |value| + result << delim unless first + result << state.indent * depth if indent + if state.strict? 
&& !(false == value || true == value || nil == value || String === value || Array === value || Hash === value || Integer === value || Float === value || Fragment === value || Symbol == value) + if state.as_json + value = state.as_json.call(value) + unless false == value || true == value || nil == value || String === value || Array === value || Hash === value || Integer === value || Float === value || Fragment === value || Symbol === value + raise GeneratorError.new("#{value.class} returned by #{state.as_json} not allowed in JSON", value) + end + result << value.to_json(state) + else + raise GeneratorError.new("#{value.class} not allowed in JSON", value) + end + elsif value.respond_to?(:to_json) + result << value.to_json(state) + else + result << %{"#{String(value)}"} + end + first = false + } + depth = state.depth -= 1 + result << state.array_nl + result << state.indent * depth if indent + result << ']' + end + end + + module Integer + # Returns a JSON string representation for this Integer number. + def to_json(*) to_s end + end + + module Float + # Returns a JSON string representation for this Float number. + def to_json(state = nil, *args) + state = State.from_state(state) + if infinite? || nan? + if state.allow_nan? + to_s + elsif state.strict? && state.as_json + casted_value = state.as_json.call(self) + + if casted_value.equal?(self) + raise GeneratorError.new("#{self} not allowed in JSON", self) + end + + state.check_max_nesting + state.depth += 1 + result = casted_value.to_json(state, *args) + state.depth -= 1 + result + else + raise GeneratorError.new("#{self} not allowed in JSON", self) + end + else + to_s + end + end + end + + module Symbol + def to_json(state = nil, *args) + state = State.from_state(state) + if state.strict? + name.to_json(state, *args) + else + super + end + end + end + + module String + # This string should be encoded with UTF-8 A call to this method + # returns a JSON string encoded with UTF16 big endian characters as + # \u????. + def to_json(state = nil, *args) + state = State.from_state(state) + if encoding == ::Encoding::UTF_8 + unless valid_encoding? + raise GeneratorError.new("source sequence is illegal/malformed utf-8", self) + end + string = self + else + string = encode(::Encoding::UTF_8) + end + if state.ascii_only? + %("#{JSON::TruffleRuby::Generator.utf8_to_json_ascii(string, state.script_safe)}") + else + %("#{JSON::TruffleRuby::Generator.utf8_to_json(string, state.script_safe)}") + end + rescue Encoding::UndefinedConversionError => error + raise ::JSON::GeneratorError.new(error.message, self) + end + + # Module that holds the extending methods if, the String module is + # included. + module Extend + # Raw Strings are JSON Objects (the raw bytes are stored in an + # array for the key "raw"). The Ruby String can be created by this + # module method. + def json_create(o) + o['raw'].pack('C*') + end + end + + # Extends _modul_ with the String::Extend module. + def self.included(modul) + modul.extend Extend + end + + # This method creates a raw object hash, that can be nested into + # other data structures and will be unparsed as a raw string. This + # method should be used, if you want to convert raw strings to JSON + # instead of UTF-8 strings, e. g. binary data. + def to_json_raw_object + { + JSON.create_id => self.class.name, + 'raw' => self.unpack('C*'), + } + end + + # This method creates a JSON text from the result of + # a call to to_json_raw_object of this String. 
+ def to_json_raw(*args) + to_json_raw_object.to_json(*args) + end + end + + module TrueClass + # Returns a JSON string for true: 'true'. + def to_json(*) 'true' end + end + + module FalseClass + # Returns a JSON string for false: 'false'. + def to_json(*) 'false' end + end + + module NilClass + # Returns a JSON string for nil: 'null'. + def to_json(*) 'null' end + end + end + end + end +end diff --git a/vendor/bundle/ruby/3.2.0/gems/json-2.13.2/lib/json/version.rb b/vendor/bundle/ruby/3.2.0/gems/json-2.13.2/lib/json/version.rb new file mode 100644 index 000000000..f9ac3e17a --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/json-2.13.2/lib/json/version.rb @@ -0,0 +1,5 @@ +# frozen_string_literal: true + +module JSON + VERSION = '2.13.2' +end diff --git a/vendor/bundle/ruby/3.2.0/gems/jwt-3.1.2/AUTHORS b/vendor/bundle/ruby/3.2.0/gems/jwt-3.1.2/AUTHORS new file mode 100644 index 000000000..c0be88c30 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/jwt-3.1.2/AUTHORS @@ -0,0 +1,119 @@ +Tim Rudat +Joakim Antman +Jeff Lindsay +A.B +shields +Bob Aman +Emilio Cristalli +Egon Zemmer +Zane Shannon +Nikita Shatov +Paul Battley +Oliver +blackanger +Ville Lautanala +Tyler Pickett +James Stonehill +Adam Michael +Martin Emde +Saverio Trioni +Peter M. Goldstein +Korstiaan de Ridder +Richard Larocque +Andrew Davis +Yason Khaburzaniya +Klaas Jan Wierenga +Nick Hammond +Bart de Water +Steve Sloan +Antonis Berkakis +Bill Mill +Kevin Olbrich +Simon Fish +jb08 +lukas +Rodrigo López Dato +ojab +Ritikesh +sawyerzhang +Larry Lv +smudge +wohlgejm +Tom Wey +yann ARMAND +Brian Flethcer +Jurriaan Pruis +Erik Michaels-Ober +Matthew Simpson +Steven Davidovitz +Nicolas Leger +Pierre Michard +RahulBajaj +Rob Wygand +Ryan Brushett +Ryan McIlmoyl +Ryan Metzler +Severin Schoepke +Shaun Guth +Steve Teti +T.J. 
Schuck +Taiki Sugawara +Takehiro Adachi +Tobias Haar +Toby Pinder +Tomé Duarte +Travis Hunter +Yuji Yaginuma +Zuzanna Stolińska +aarongray +danielgrippi +fusagiko/takayamaki +mai fujii +nycvotes-dev +revodoge +rono23 +antonmorant +Adam Greene +Alexander Boyd +Alexandr Kostrikov +Aman Gupta +Ariel Salomon +Arnaud Mesureur +Artsiom Kuts +Austin Kabiru +B +Bouke van der Bijl +Brandon Keepers +Dan Leyden +Dave Grijalva +Dmitry Pashkevich +Dorian Marié +Ernie Miller +Evgeni Golov +Ewoud Kohl van Wijngaarden +HoneyryderChuck +Igor Victor +Ilyaaaaaaaaaaaaa Zhitomirskiy +Jens Hausherr +Jeremiah Wuenschel +John Downey +Jordan Brough +Josh Bodah +JotaSe +Juanito Fatas +Julio Lopez +Katelyn Kasperowicz +Leonardo Saraiva +Lowell Kirsh +Loïc Lengrand +Lucas Mazza +Makoto Chiba +Manuel Bustillo +Marco Adkins +Meredith Leu +Micah Gates +Michał Begejowicz +Mike Eirih +Mike Pastore +Mingan +Mitch Birti diff --git a/vendor/bundle/ruby/3.2.0/gems/jwt-3.1.2/CHANGELOG.md b/vendor/bundle/ruby/3.2.0/gems/jwt-3.1.2/CHANGELOG.md new file mode 100644 index 000000000..31d3fb8d8 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/jwt-3.1.2/CHANGELOG.md @@ -0,0 +1,991 @@ +# Changelog + +## [v3.1.2](https://github.com/jwt/ruby-jwt/tree/v3.1.2) (2025-06-28) + +[Full Changelog](https://github.com/jwt/ruby-jwt/compare/v3.1.1...v3.1.2) + +**Fixes and enhancements:** + +- Avoid using the same digest across calls in JWT::JWA::Ecdsa and JWT::JWA::Rsa [#697](https://github.com/jwt/ruby-jwt/pull/697) +- Fix signing with a EC JWK [#699](https://github.com/jwt/ruby-jwt/pull/699) ([@anakinj](https://github.com/anakinj)) + +## [v3.1.1](https://github.com/jwt/ruby-jwt/tree/v3.1.1) (2025-06-24) + +[Full Changelog](https://github.com/jwt/ruby-jwt/compare/v3.1.0...v3.1.1) + +**Fixes and enhancements:** + +- Require the algorithm to be provided when signing and verifying tokens using JWKs [#695](https://github.com/jwt/ruby-jwt/pull/695) ([@anakinj](https://github.com/anakinj)) + +## [v3.1.0](https://github.com/jwt/ruby-jwt/tree/v3.1.0) (2025-06-23) + +[Full Changelog](https://github.com/jwt/ruby-jwt/compare/v3.0.0...v3.1.0) + +**Features:** + +- Add support for x5t header parameter for X.509 certificate thumbprint verification [#669](https://github.com/jwt/ruby-jwt/pull/669) ([@hieuk09](https://github.com/hieuk09)) +- Raise an error if the ECDSA signing or verification key is not an instance of `OpenSSL::PKey::EC` [#688](https://github.com/jwt/ruby-jwt/pull/688) ([@anakinj](https://github.com/anakinj)) +- Allow `OpenSSL::PKey::EC::Point` to be used as the verification key in ECDSA [#689](https://github.com/jwt/ruby-jwt/pull/689) ([@anakinj](https://github.com/anakinj)) +- Require claims to have been verified before accessing the `JWT::EncodedToken#payload` [#690](https://github.com/jwt/ruby-jwt/pull/690) ([@anakinj](https://github.com/anakinj)) +- Support signing and verifying tokens using a JWK [#692](https://github.com/jwt/ruby-jwt/pull/692) ([@anakinj](https://github.com/anakinj)) + +## [v3.0.0](https://github.com/jwt/ruby-jwt/tree/v3.0.0) (2025-06-14) + +[Full Changelog](https://github.com/jwt/ruby-jwt/compare/v2.10.1...v3.0.0) + +**Breaking changes:** + +- Require token signature to be verified before accessing payload [#648](https://github.com/jwt/ruby-jwt/pull/648) ([@anakinj](https://github.com/anakinj)) +- Drop support for the HS512256 algorithm [#650](https://github.com/jwt/ruby-jwt/pull/650) ([@anakinj](https://github.com/anakinj)) +- Remove deprecated claim verification methods [#654](https://github.com/jwt/ruby-jwt/pull/654) 
([@anakinj](https://github.com/anakinj)) +- Remove dependency to rbnacl [#655](https://github.com/jwt/ruby-jwt/pull/655) ([@anakinj](https://github.com/anakinj)) +- Support only stricter base64 decoding (RFC 4648) [#658](https://github.com/jwt/ruby-jwt/pull/658) ([@anakinj](https://github.com/anakinj)) +- Custom algorithms are required to include `JWT::JWA::SigningAlgorithm` [#660](https://github.com/jwt/ruby-jwt/pull/660) ([@anakinj](https://github.com/anakinj)) +- Require RSA keys to be at least 2048 bits [#661](https://github.com/jwt/ruby-jwt/pull/661) ([@anakinj](https://github.com/anakinj)) +- Base64 encode and decode the k value for HMAC JWKs [#662](https://github.com/jwt/ruby-jwt/pull/662) ([@anakinj](https://github.com/anakinj)) + +Take a look at the [upgrade guide](UPGRADING.md) for more details. + +**Features:** + +- JWT::EncodedToken#verify! method that bundles signature and claim validation [#647](https://github.com/jwt/ruby-jwt/pull/647) ([@anakinj](https://github.com/anakinj)) +- Do not override the alg header if already given [#659](https://github.com/jwt/ruby-jwt/pull/659) ([@anakinj](https://github.com/anakinj)) +- Make `JWK::KeyFinder` compatible with `JWT::EncodedToken` [#663](https://github.com/jwt/ruby-jwt/pull/663) ([@anakinj](https://github.com/anakinj)) + +**Fixes and enhancements:** + +- Ruby 3.4 to CI matrix [#649](https://github.com/jwt/ruby-jwt/pull/649) ([@anakinj](https://github.com/anakinj)) +- Add logger as development dependency [#670](https://github.com/jwt/ruby-jwt/pull/670) ([@hieuk09](https://github.com/hieuk09)) + +## [v2.10.1](https://github.com/jwt/ruby-jwt/tree/v2.10.1) (2024-12-26) + +[Full Changelog](https://github.com/jwt/ruby-jwt/compare/v2.10.0...v2.10.1) + +**Fixes and enhancements:** + +- Make version constants public again [#646](https://github.com/jwt/ruby-jwt/pull/646) ([@anakinj](https://github.com/anakinj)) + +## [v2.10.0](https://github.com/jwt/ruby-jwt/tree/v2.10.0) (2024-12-25) + +[Full Changelog](https://github.com/jwt/ruby-jwt/compare/v2.9.3...v2.10.0) + +**Features:** + +- JWT::Token and JWT::EncodedToken for signing and verifying tokens [#621](https://github.com/jwt/ruby-jwt/pull/621) ([@anakinj](https://github.com/anakinj)) +- Detached payload support for JWT::Token and JWT::EncodedToken [#630](https://github.com/jwt/ruby-jwt/pull/630) ([@anakinj](https://github.com/anakinj)) +- Skip decoding payload if b64 header is present and false [#631](https://github.com/jwt/ruby-jwt/pull/631) ([@anakinj](https://github.com/anakinj)) +- Remove a few custom Rubocop configs [#638](https://github.com/jwt/ruby-jwt/pull/638) ([@anakinj](https://github.com/anakinj)) + +**Fixes and enhancements:** + +- Deprecation warnings for deprecated methods and classes [#629](https://github.com/jwt/ruby-jwt/pull/629) ([@anakinj](https://github.com/anakinj)) +- Improved documentation for public apis [#629](https://github.com/jwt/ruby-jwt/pull/629) ([@anakinj](https://github.com/anakinj)) +- Use correct methods when raising error during signing/verification with EdDSA [#633](https://github.com/jwt/ruby-jwt/pull/633) +- Fix JWT::EncodedToken behavior with empty string as token [#640](https://github.com/jwt/ruby-jwt/pull/640) ([@ragalie](https://github.com/ragalie)) +- Deprecation warnings for rbnacl backed functionality [#641](https://github.com/jwt/ruby-jwt/pull/641) ([@anakinj](https://github.com/anakinj)) + +## [v2.9.3](https://github.com/jwt/ruby-jwt/tree/v2.9.3) (2024-10-03) + +[Full Changelog](https://github.com/jwt/ruby-jwt/compare/v2.9.2...v2.9.3) + 
+**Fixes and enhancements:** + +- Return truthy value for `::JWT::ClaimsValidator#validate!` and `::JWT::Verify.verify_claims` [#628](https://github.com/jwt/ruby-jwt/pull/628) ([@anakinj](https://github.com/anakinj)) + +## [v2.9.2](https://github.com/jwt/ruby-jwt/tree/v2.9.2) (2024-10-03) + +[Full Changelog](https://github.com/jwt/ruby-jwt/compare/v2.9.1...v2.9.2) + +**Features:** + +- Standalone claim verification interface [#626](https://github.com/jwt/ruby-jwt/pull/626) ([@anakinj](https://github.com/anakinj)) + +**Fixes and enhancements:** + +- Updated README to correctly document `OpenSSL::HMAC` documentation [#617](https://github.com/jwt/ruby-jwt/pull/617) ([@aedryan](https://github.com/aedryan)) +- Verify JWT header format [#622](https://github.com/jwt/ruby-jwt/pull/622) ([@304](https://github.com/304)) +- Bring back `::JWT::ClaimsValidator`, `::JWT::Verify` and a few other removed interfaces for preserved backwards compatibility [#624](https://github.com/jwt/ruby-jwt/pull/624) ([@anakinj](https://github.com/anakinj)) + +## [v2.9.1](https://github.com/jwt/ruby-jwt/tree/v2.9.1) (2024-09-23) + +[Full Changelog](https://github.com/jwt/ruby-jwt/compare/v2.9.0...v2.9.1) + +**Fixes and enhancements:** + +- Fix regression in `iss` and `aud` claim validation [#619](https://github.com/jwt/ruby-jwt/pull/619) ([@anakinj](https://github.com/anakinj)) + +## [v2.9.0](https://github.com/jwt/ruby-jwt/tree/v2.9.0) (2024-09-15) + +[Full Changelog](https://github.com/jwt/ruby-jwt/compare/v2.8.2...v2.9.0) + +**Features:** + +- Build and push gem using a GH action [#612](https://github.com/jwt/ruby-jwt/pull/612) ([@anakinj](https://github.com/anakinj)) + +**Fixes and enhancements:** + +- Refactor claim validators into their own classes [#605](https://github.com/jwt/ruby-jwt/pull/605) ([@anakinj](https://github.com/anakinj), [@MatteoPierro](https://github.com/MatteoPierro)) +- Allow extending available algorithms [#607](https://github.com/jwt/ruby-jwt/pull/607) ([@anakinj](https://github.com/anakinj)) +- Do not include the EdDSA algorithm if rbnacl not available [#613](https://github.com/jwt/ruby-jwt/pull/613) ([@anakinj](https://github.com/anakinj)) + +## [v2.8.2](https://github.com/jwt/ruby-jwt/tree/v2.8.2) (2024-06-18) + +[Full Changelog](https://github.com/jwt/ruby-jwt/compare/v2.8.1...v2.8.2) + +**Fixes and enhancements:** + +- Print deprecation warnings only on when token decoding succeeds [#600](https://github.com/jwt/ruby-jwt/pull/600) ([@anakinj](https://github.com/anakinj)) +- Unify code style [#602](https://github.com/jwt/ruby-jwt/pull/602) ([@anakinj](https://github.com/anakinj)) + +## [v2.8.1](https://github.com/jwt/ruby-jwt/tree/v2.8.1) (2024-02-29) + +[Full Changelog](https://github.com/jwt/ruby-jwt/compare/v2.8.0...v2.8.1) + +**Features:** + +- Configurable base64 decode behaviour [#589](https://github.com/jwt/ruby-jwt/pull/589) ([@anakinj](https://github.com/anakinj)) + +**Fixes and enhancements:** + +- Output deprecation warnings once [#589](https://github.com/jwt/ruby-jwt/pull/589) ([@anakinj](https://github.com/anakinj)) + +## [v2.8.0](https://github.com/jwt/ruby-jwt/tree/v2.8.0) (2024-02-17) + +[Full Changelog](https://github.com/jwt/ruby-jwt/compare/v2.7.1...v2.8.0) + +**Features:** + +- Updated rubocop to 1.56 [#573](https://github.com/jwt/ruby-jwt/pull/573) ([@anakinj](https://github.com/anakinj)) +- Run CI on Ruby 3.3 [#577](https://github.com/jwt/ruby-jwt/pull/577) ([@anakinj](https://github.com/anakinj)) +- Deprecation warning added for the HMAC algorithm HS512256 (HMAC-SHA-512 
truncated to 256-bits) [#575](https://github.com/jwt/ruby-jwt/pull/575) ([@anakinj](https://github.com/anakinj)) +- Stop using RbNaCl for standard HMAC algorithms [#575](https://github.com/jwt/ruby-jwt/pull/575) ([@anakinj](https://github.com/anakinj)) + +**Fixes and enhancements:** + +- Fix signature has expired error if payload is a string [#555](https://github.com/jwt/ruby-jwt/pull/555) ([@GobinathAL](https://github.com/GobinathAL)) +- Fix key base equality and spaceship operators [#569](https://github.com/jwt/ruby-jwt/pull/569) ([@magneland](https://github.com/magneland)) +- Remove explicit base64 require from x5c_key_finder [#580](https://github.com/jwt/ruby-jwt/pull/580) ([@anakinj](https://github.com/anakinj)) +- Performance improvements and cleanup of tests [#581](https://github.com/jwt/ruby-jwt/pull/581) ([@anakinj](https://github.com/anakinj)) +- Repair EC x/y coordinates when importing JWK [#585](https://github.com/jwt/ruby-jwt/pull/585) ([@julik](https://github.com/julik)) +- Explicit dependency to the base64 gem [#582](https://github.com/jwt/ruby-jwt/pull/582) ([@anakinj](https://github.com/anakinj)) +- Deprecation warning for decoding content not compliant with RFC 4648 [#582](https://github.com/jwt/ruby-jwt/pull/582) ([@anakinj](https://github.com/anakinj)) +- Algorithms moved under the `::JWT::JWA` module ([@anakinj](https://github.com/anakinj)) + +## [v2.7.1](https://github.com/jwt/ruby-jwt/tree/v2.8.0) (2023-06-09) + +[Full Changelog](https://github.com/jwt/ruby-jwt/compare/v2.7.0...v2.7.1) + +**Fixes and enhancements:** + +- Handle invalid algorithm when decoding JWT [#559](https://github.com/jwt/ruby-jwt/pull/559) ([@nataliastanko](https://github.com/nataliastanko)) +- Do not raise error when verifying bad HMAC signature [#563](https://github.com/jwt/ruby-jwt/pull/563) ([@hieuk09](https://github.com/hieuk09)) + +## [v2.7.0](https://github.com/jwt/ruby-jwt/tree/v2.7.0) (2023-02-01) + +[Full Changelog](https://github.com/jwt/ruby-jwt/compare/v2.6.0...v2.7.0) + +**Features:** + +- Support OKP (Ed25519) keys for JWKs [#540](https://github.com/jwt/ruby-jwt/pull/540) ([@anakinj](https://github.com/anakinj)) +- JWK Sets can now be used for tokens with nil kid [#543](https://github.com/jwt/ruby-jwt/pull/543) ([@bellebaum](https://github.com/bellebaum)) + +**Fixes and enhancements:** + +- Fix issue with multiple keys returned by keyfinder and multiple allowed algorithms [#545](https://github.com/jwt/ruby-jwt/pull/545) ([@mpospelov](https://github.com/mpospelov)) +- Non-string `kid` header values are now rejected [#543](https://github.com/jwt/ruby-jwt/pull/543) ([@bellebaum](https://github.com/bellebaum)) + +## [v2.6.0](https://github.com/jwt/ruby-jwt/tree/v2.6.0) (2022-12-22) + +[Full Changelog](https://github.com/jwt/ruby-jwt/compare/v2.5.0...v2.6.0) + +**Features:** + +- Support custom algorithms by passing algorithm objects [#512](https://github.com/jwt/ruby-jwt/pull/512) ([@anakinj](https://github.com/anakinj)) +- Support descriptive (not key related) JWK parameters [#520](https://github.com/jwt/ruby-jwt/pull/520) ([@bellebaum](https://github.com/bellebaum)) +- Support for JSON Web Key Sets [#525](https://github.com/jwt/ruby-jwt/pull/525) ([@bellebaum](https://github.com/bellebaum)) +- Support HMAC keys over 32 chars when using RbNaCl [#521](https://github.com/jwt/ruby-jwt/pull/521) ([@anakinj](https://github.com/anakinj)) + +**Fixes and enhancements:** + +- Raise descriptive error on empty hmac_secret and OpenSSL 3.0/openssl gem <3.0.1 
[#530](https://github.com/jwt/ruby-jwt/pull/530) ([@jonmchan](https://github.com/jonmchan)) + +## [v2.5.0](https://github.com/jwt/ruby-jwt/tree/v2.5.0) (2022-08-25) + +[Full Changelog](https://github.com/jwt/ruby-jwt/compare/v2.4.1...v2.5.0) + +**Features:** + +- Support JWK thumbprints as key ids [#481](https://github.com/jwt/ruby-jwt/pull/481) ([@anakinj](https://github.com/anakinj)) +- Support OpenSSL >= 3.0 [#496](https://github.com/jwt/ruby-jwt/pull/496) ([@anakinj](https://github.com/anakinj)) + +**Fixes and enhancements:** + +- Bring back the old Base64 (RFC2045) deocode mechanisms [#488](https://github.com/jwt/ruby-jwt/pull/488) ([@anakinj](https://github.com/anakinj)) +- Rescue RbNaCl exception for EdDSA wrong key [#491](https://github.com/jwt/ruby-jwt/pull/491) ([@n-studio](https://github.com/n-studio)) +- New parameter name for cases when kid is not found using JWK key loader proc [#501](https://github.com/jwt/ruby-jwt/pull/501) ([@anakinj](https://github.com/anakinj)) +- Fix NoMethodError when a 2 segment token is missing 'alg' header [#502](https://github.com/jwt/ruby-jwt/pull/502) ([@cmrd-senya](https://github.com/cmrd-senya)) + +## [v2.4.1](https://github.com/jwt/ruby-jwt/tree/v2.4.1) (2022-06-07) + +[Full Changelog](https://github.com/jwt/ruby-jwt/compare/v2.4.0...v2.4.1) + +**Fixes and enhancements:** + +- Raise JWT::DecodeError on invalid signature [\#484](https://github.com/jwt/ruby-jwt/pull/484) ([@freakyfelt!](https://github.com/freakyfelt!)) + +## [v2.4.0](https://github.com/jwt/ruby-jwt/tree/v2.4.0) (2022-06-06) + +[Full Changelog](https://github.com/jwt/ruby-jwt/compare/v2.3.0...v2.4.0) + +**Features:** + +- Dropped support for Ruby 2.5 and older [#453](https://github.com/jwt/ruby-jwt/pull/453) - ([@anakinj](https://github.com/anakinj)) +- Use Ruby built-in url-safe base64 methods [#454](https://github.com/jwt/ruby-jwt/pull/454) - ([@bdewater](https://github.com/bdewater)) +- Updated rubocop to 1.23.0 [#457](https://github.com/jwt/ruby-jwt/pull/457) - ([@anakinj](https://github.com/anakinj)) +- Add x5c header key finder [#338](https://github.com/jwt/ruby-jwt/pull/338) - ([@bdewater](https://github.com/bdewater)) +- Author driven changelog process [#463](https://github.com/jwt/ruby-jwt/pull/463) - ([@anakinj](https://github.com/anakinj)) +- Allow regular expressions and procs to verify issuer [\#437](https://github.com/jwt/ruby-jwt/pull/437) ([rewritten](https://github.com/rewritten)) +- Add Support to be able to verify from multiple keys [\#425](https://github.com/jwt/ruby-jwt/pull/425) ([ritikesh](https://github.com/ritikesh)) + +**Fixes and enhancements:** + +- Readme: Typo fix re MissingRequiredClaim [\#451](https://github.com/jwt/ruby-jwt/pull/451) ([antonmorant](https://github.com/antonmorant)) +- Fix RuboCop TODOs [\#476](https://github.com/jwt/ruby-jwt/pull/476) ([typhoon2099](https://github.com/typhoon2099)) +- Make specific algorithms in README linkable [\#472](https://github.com/jwt/ruby-jwt/pull/472) ([milieu](https://github.com/milieu)) +- Update note about supported JWK types [\#475](https://github.com/jwt/ruby-jwt/pull/475) ([dpashkevich](https://github.com/dpashkevich)) +- Create CODE_OF_CONDUCT.md [\#449](https://github.com/jwt/ruby-jwt/pull/449) ([loic5](https://github.com/loic5)) + +## [v2.3.0](https://github.com/jwt/ruby-jwt/tree/v2.3.0) (2021-10-03) + +[Full Changelog](https://github.com/jwt/ruby-jwt/compare/v2.2.3...v2.3.0) + +**Closed issues:** + +- \[SECURITY\] Algorithm Confusion Through kid Header 
[\#440](https://github.com/jwt/ruby-jwt/issues/440) +- JWT to memory [\#436](https://github.com/jwt/ruby-jwt/issues/436) +- ArgumentError: wrong number of arguments \(given 2, expected 1\) [\#429](https://github.com/jwt/ruby-jwt/issues/429) +- HMAC section of README outdated [\#421](https://github.com/jwt/ruby-jwt/issues/421) +- NoMethodError: undefined method `zero?' for nil:NilClass if JWT has no 'alg' field [\#410](https://github.com/jwt/ruby-jwt/issues/410) +- Release new version [\#409](https://github.com/jwt/ruby-jwt/issues/409) +- NameError: uninitialized constant JWT::JWK [\#403](https://github.com/jwt/ruby-jwt/issues/403) + +**Merged pull requests:** + +- Release 2.3.0 [\#448](https://github.com/jwt/ruby-jwt/pull/448) ([excpt](https://github.com/excpt)) +- Fix Style/MultilineIfModifier issues [\#447](https://github.com/jwt/ruby-jwt/pull/447) ([anakinj](https://github.com/anakinj)) +- feat\(EdDSA\): Accept EdDSA as algorithm header [\#446](https://github.com/jwt/ruby-jwt/pull/446) ([Pierre-Michard](https://github.com/Pierre-Michard)) +- Pass kid param through JWT::JWK.create_from [\#445](https://github.com/jwt/ruby-jwt/pull/445) ([shaun-guth-allscripts](https://github.com/shaun-guth-allscripts)) +- fix document about passing JWKs as a simple Hash [\#443](https://github.com/jwt/ruby-jwt/pull/443) ([takayamaki](https://github.com/takayamaki)) +- Tests for mixing JWK keys with mismatching algorithms [\#441](https://github.com/jwt/ruby-jwt/pull/441) ([anakinj](https://github.com/anakinj)) +- verify_claims test shouldnt be within the verify_sub test [\#431](https://github.com/jwt/ruby-jwt/pull/431) ([andyjdavis](https://github.com/andyjdavis)) +- Allow decode options to specify required claims [\#430](https://github.com/jwt/ruby-jwt/pull/430) ([andyjdavis](https://github.com/andyjdavis)) +- Fix OpenSSL::PKey::EC public_key handing in tests [\#427](https://github.com/jwt/ruby-jwt/pull/427) ([anakinj](https://github.com/anakinj)) +- Add documentation for find_key [\#426](https://github.com/jwt/ruby-jwt/pull/426) ([ritikesh](https://github.com/ritikesh)) +- Give ruby 3.0 as a string to avoid number formatting issues [\#424](https://github.com/jwt/ruby-jwt/pull/424) ([anakinj](https://github.com/anakinj)) +- Tests for iat verification behaviour [\#423](https://github.com/jwt/ruby-jwt/pull/423) ([anakinj](https://github.com/anakinj)) +- Remove HMAC with nil secret from documentation [\#422](https://github.com/jwt/ruby-jwt/pull/422) ([boardfish](https://github.com/boardfish)) +- Update broken link in README [\#420](https://github.com/jwt/ruby-jwt/pull/420) ([severin](https://github.com/severin)) +- Add metadata for RubyGems [\#418](https://github.com/jwt/ruby-jwt/pull/418) ([nickhammond](https://github.com/nickhammond)) +- Fixed a typo about class name [\#417](https://github.com/jwt/ruby-jwt/pull/417) ([mai-f](https://github.com/mai-f)) +- Fix references for v2.2.3 on CHANGELOG [\#416](https://github.com/jwt/ruby-jwt/pull/416) ([vyper](https://github.com/vyper)) +- Raise IncorrectAlgorithm if token has no alg header [\#411](https://github.com/jwt/ruby-jwt/pull/411) ([bouk](https://github.com/bouk)) + +## [v2.2.3](https://github.com/jwt/ruby-jwt/tree/v2.2.3) (2021-04-19) + +[Full Changelog](https://github.com/jwt/ruby-jwt/compare/v2.2.2...v2.2.3) + +**Implemented enhancements:** + +- Verify algorithm before evaluating keyfinder [\#343](https://github.com/jwt/ruby-jwt/issues/343) +- Why jwt depends on json \< 2.0 ? 
[\#179](https://github.com/jwt/ruby-jwt/issues/179) +- Support for JWK in-lieu of rsa_public [\#158](https://github.com/jwt/ruby-jwt/issues/158) +- Fix rspec `raise_error` warning [\#413](https://github.com/jwt/ruby-jwt/pull/413) ([excpt](https://github.com/excpt)) +- Add support for JWKs with HMAC key type. [\#372](https://github.com/jwt/ruby-jwt/pull/372) ([phlegx](https://github.com/phlegx)) +- Improve 'none' algorithm handling [\#365](https://github.com/jwt/ruby-jwt/pull/365) ([danleyden](https://github.com/danleyden)) +- Handle parsed JSON JWKS input with string keys [\#348](https://github.com/jwt/ruby-jwt/pull/348) ([martinemde](https://github.com/martinemde)) +- Allow Numeric values during encoding [\#327](https://github.com/jwt/ruby-jwt/pull/327) ([fanfilmu](https://github.com/fanfilmu)) + +**Closed issues:** + +- "Signature verification raised", yet jwt.io says "Signature Verified" [\#401](https://github.com/jwt/ruby-jwt/issues/401) +- truffleruby-head build is failing [\#396](https://github.com/jwt/ruby-jwt/issues/396) +- JWT::JWK::EC needs `require 'forwardable'` [\#392](https://github.com/jwt/ruby-jwt/issues/392) +- How to use a 'signing key' as used by next-auth [\#389](https://github.com/jwt/ruby-jwt/issues/389) +- undefined method `verify' for nil:NilClass when validate a JWT with JWK [\#383](https://github.com/jwt/ruby-jwt/issues/383) +- Make specifying "algorithm" optional on decode [\#380](https://github.com/jwt/ruby-jwt/issues/380) +- ADFS created access tokens can't be validated due to missing 'kid' header [\#370](https://github.com/jwt/ruby-jwt/issues/370) +- new version? [\#355](https://github.com/jwt/ruby-jwt/issues/355) +- JWT gitlab OmniAuth provider setup support [\#354](https://github.com/jwt/ruby-jwt/issues/354) +- Release with support for RSA.import for ruby \< 2.4 hasn't been released [\#347](https://github.com/jwt/ruby-jwt/issues/347) +- cannot load such file -- jwt [\#339](https://github.com/jwt/ruby-jwt/issues/339) + +**Merged pull requests:** + +- Prepare 2.2.3 release [\#415](https://github.com/jwt/ruby-jwt/pull/415) ([excpt](https://github.com/excpt)) +- Remove codeclimate code coverage dev dependency [\#414](https://github.com/jwt/ruby-jwt/pull/414) ([excpt](https://github.com/excpt)) +- Add forwardable dependency [\#408](https://github.com/jwt/ruby-jwt/pull/408) ([anakinj](https://github.com/anakinj)) +- Ignore casing of algorithm [\#405](https://github.com/jwt/ruby-jwt/pull/405) ([johnnyshields](https://github.com/johnnyshields)) +- Document function and add tests for verify claims method [\#404](https://github.com/jwt/ruby-jwt/pull/404) ([yasonk](https://github.com/yasonk)) +- documenting calling verify_jti callback with 2 arguments in the readme [\#402](https://github.com/jwt/ruby-jwt/pull/402) ([HoneyryderChuck](https://github.com/HoneyryderChuck)) +- Target the master branch on the build status badge [\#399](https://github.com/jwt/ruby-jwt/pull/399) ([anakinj](https://github.com/anakinj)) +- Improving the local development experience [\#397](https://github.com/jwt/ruby-jwt/pull/397) ([anakinj](https://github.com/anakinj)) +- Fix sourcelevel broken links [\#395](https://github.com/jwt/ruby-jwt/pull/395) ([anakinj](https://github.com/anakinj)) +- Don't recommend installing gem with sudo [\#391](https://github.com/jwt/ruby-jwt/pull/391) ([tjschuck](https://github.com/tjschuck)) +- Enable rubocop locally and on ci [\#390](https://github.com/jwt/ruby-jwt/pull/390) ([anakinj](https://github.com/anakinj)) +- Ci and test cleanup 
[\#387](https://github.com/jwt/ruby-jwt/pull/387) ([anakinj](https://github.com/anakinj)) +- Make JWT::JWK::EC compatible with Ruby 2.3 [\#386](https://github.com/jwt/ruby-jwt/pull/386) ([anakinj](https://github.com/anakinj)) +- Support JWKs for pre 2.3 rubies [\#382](https://github.com/jwt/ruby-jwt/pull/382) ([anakinj](https://github.com/anakinj)) +- Replace Travis CI with GitHub Actions \(also favor openssl/rbnacl combinations over rails compatibility tests\) [\#381](https://github.com/jwt/ruby-jwt/pull/381) ([anakinj](https://github.com/anakinj)) +- Add auth0 sponsor message [\#379](https://github.com/jwt/ruby-jwt/pull/379) ([excpt](https://github.com/excpt)) +- Adapt HMAC to JWK RSA code style. [\#378](https://github.com/jwt/ruby-jwt/pull/378) ([phlegx](https://github.com/phlegx)) +- Disable Rails cops [\#376](https://github.com/jwt/ruby-jwt/pull/376) ([anakinj](https://github.com/anakinj)) +- Support exporting RSA JWK private keys [\#375](https://github.com/jwt/ruby-jwt/pull/375) ([anakinj](https://github.com/anakinj)) +- Ebert is SourceLevel nowadays [\#374](https://github.com/jwt/ruby-jwt/pull/374) ([anakinj](https://github.com/anakinj)) +- Add support for JWKs with EC key type [\#371](https://github.com/jwt/ruby-jwt/pull/371) ([richardlarocque](https://github.com/richardlarocque)) +- Add Truffleruby head to CI [\#368](https://github.com/jwt/ruby-jwt/pull/368) ([gogainda](https://github.com/gogainda)) +- Add more docs about JWK support [\#341](https://github.com/jwt/ruby-jwt/pull/341) ([take](https://github.com/take)) + +## [v2.2.2](https://github.com/jwt/ruby-jwt/tree/v2.2.2) (2020-08-18) + +[Full Changelog](https://github.com/jwt/ruby-jwt/compare/v2.2.1...v2.2.2) + +**Implemented enhancements:** + +- JWK does not decode. [\#332](https://github.com/jwt/ruby-jwt/issues/332) +- Inconsistent use of symbol and string keys in args \(exp and alrogithm\). [\#331](https://github.com/jwt/ruby-jwt/issues/331) +- Pin simplecov to \< 0.18 [\#356](https://github.com/jwt/ruby-jwt/pull/356) ([anakinj](https://github.com/anakinj)) +- verifies algorithm before evaluating keyfinder [\#346](https://github.com/jwt/ruby-jwt/pull/346) ([jb08](https://github.com/jb08)) +- Update Rails 6 appraisal to use actual release version [\#336](https://github.com/jwt/ruby-jwt/pull/336) ([smudge](https://github.com/smudge)) +- Update Travis [\#326](https://github.com/jwt/ruby-jwt/pull/326) ([berkos](https://github.com/berkos)) +- Improvement/encode hmac without key [\#312](https://github.com/jwt/ruby-jwt/pull/312) ([JotaSe](https://github.com/JotaSe)) + +**Fixed bugs:** + +- v2.2.1 warning: already initialized constant JWT Error [\#335](https://github.com/jwt/ruby-jwt/issues/335) +- 2.2.1 is no longer raising `JWT::DecodeError` on `nil` verification key [\#328](https://github.com/jwt/ruby-jwt/issues/328) +- Fix algorithm picking from decode options [\#359](https://github.com/jwt/ruby-jwt/pull/359) ([excpt](https://github.com/excpt)) +- Raise error when verification key is empty [\#358](https://github.com/jwt/ruby-jwt/pull/358) ([anakinj](https://github.com/anakinj)) + +**Closed issues:** + +- JWT RSA: is it possible to encrypt using the public key? 
[\#366](https://github.com/jwt/ruby-jwt/issues/366) +- Example unsigned token that bypasses verification [\#364](https://github.com/jwt/ruby-jwt/issues/364) +- Verify exp claim/field even if it's not present [\#363](https://github.com/jwt/ruby-jwt/issues/363) +- Decode any token [\#360](https://github.com/jwt/ruby-jwt/issues/360) +- \[question\] example of using a pub/priv keys for signing? [\#351](https://github.com/jwt/ruby-jwt/issues/351) +- JWT::ExpiredSignature raised for non-JSON payloads [\#350](https://github.com/jwt/ruby-jwt/issues/350) +- verify_aud only verifies that at least one aud is expected [\#345](https://github.com/jwt/ruby-jwt/issues/345) +- Sinatra 4.90s TTFB [\#344](https://github.com/jwt/ruby-jwt/issues/344) +- How to Logout [\#342](https://github.com/jwt/ruby-jwt/issues/342) +- jwt token decoding even when wrong token is provided for some letters [\#337](https://github.com/jwt/ruby-jwt/issues/337) +- Need to use `symbolize_keys` everywhere! [\#330](https://github.com/jwt/ruby-jwt/issues/330) +- eval\(\) used in Forwardable limits usage in iOS App Store [\#324](https://github.com/jwt/ruby-jwt/issues/324) +- HS512256 OpenSSL Exception: First num too large [\#322](https://github.com/jwt/ruby-jwt/issues/322) +- Can we change the separator character? [\#321](https://github.com/jwt/ruby-jwt/issues/321) +- Verifying iat without leeway may break with poorly synced clocks [\#319](https://github.com/jwt/ruby-jwt/issues/319) +- Adding support for 'hd' hosted domain string [\#314](https://github.com/jwt/ruby-jwt/issues/314) +- There is no "typ" header in version 2.0.0 [\#233](https://github.com/jwt/ruby-jwt/issues/233) + +**Merged pull requests:** + +- Release v2.2.2 [\#367](https://github.com/jwt/ruby-jwt/pull/367) ([excpt](https://github.com/excpt)) +- Fix 'already initialized constant JWT Error' [\#357](https://github.com/jwt/ruby-jwt/pull/357) ([excpt](https://github.com/excpt)) +- Support RSA.import for all Ruby versions. 
[\#333](https://github.com/jwt/ruby-jwt/pull/333) ([rabajaj0509](https://github.com/rabajaj0509)) +- Removed forwardable dependency [\#325](https://github.com/jwt/ruby-jwt/pull/325) ([anakinj](https://github.com/anakinj)) + +## [v2.2.1](https://github.com/jwt/ruby-jwt/tree/v2.2.1) (2019-05-24) + +[Full Changelog](https://github.com/jwt/ruby-jwt/compare/v2.2.0...v2.2.1) + +**Fixed bugs:** + +- need to `require 'forwardable'` to use `Forwardable` [\#316](https://github.com/jwt/ruby-jwt/issues/316) +- Add forwardable dependency for JWK RSA KeyFinder [\#317](https://github.com/jwt/ruby-jwt/pull/317) ([excpt](https://github.com/excpt)) + +**Merged pull requests:** + +- Release 2.2.1 [\#318](https://github.com/jwt/ruby-jwt/pull/318) ([excpt](https://github.com/excpt)) + +## [v2.2.0](https://github.com/jwt/ruby-jwt/tree/v2.2.0) (2019-05-23) + +[Full Changelog](https://github.com/jwt/ruby-jwt/compare/v2.2.0.pre.beta.0...v2.2.0) + +**Closed issues:** + +- misspelled es512 curve name [\#310](https://github.com/jwt/ruby-jwt/issues/310) +- With Base64 decode i can read the hashed content [\#306](https://github.com/jwt/ruby-jwt/issues/306) +- hide post-it's for graphviz views [\#303](https://github.com/jwt/ruby-jwt/issues/303) + +**Merged pull requests:** + +- Release 2.2.0 [\#315](https://github.com/jwt/ruby-jwt/pull/315) ([excpt](https://github.com/excpt)) + +## [v2.2.0.pre.beta.0](https://github.com/jwt/ruby-jwt/tree/v2.2.0.pre.beta.0) (2019-03-20) + +[Full Changelog](https://github.com/jwt/ruby-jwt/compare/v2.1.0...v2.2.0.pre.beta.0) + +**Implemented enhancements:** + +- Use iat_leeway option [\#273](https://github.com/jwt/ruby-jwt/issues/273) +- Use of global state in latest version breaks thread safety of JWT.decode [\#268](https://github.com/jwt/ruby-jwt/issues/268) +- JSON support [\#246](https://github.com/jwt/ruby-jwt/issues/246) +- Change the Github homepage URL to https [\#301](https://github.com/jwt/ruby-jwt/pull/301) ([ekohl](https://github.com/ekohl)) +- Fix Salt length for conformance with PS family specification. 
[\#300](https://github.com/jwt/ruby-jwt/pull/300) ([tobypinder](https://github.com/tobypinder)) +- Add support for Ruby 2.6 [\#299](https://github.com/jwt/ruby-jwt/pull/299) ([bustikiller](https://github.com/bustikiller)) +- update homepage in gemspec to use HTTPS [\#298](https://github.com/jwt/ruby-jwt/pull/298) ([evgeni](https://github.com/evgeni)) +- Make sure alg parameter value isn't added twice [\#297](https://github.com/jwt/ruby-jwt/pull/297) ([korstiaan](https://github.com/korstiaan)) +- Claims Validation [\#295](https://github.com/jwt/ruby-jwt/pull/295) ([jamesstonehill](https://github.com/jamesstonehill)) +- JWT::Encode refactorings, alg and exp related bugfixes [\#293](https://github.com/jwt/ruby-jwt/pull/293) ([anakinj](https://github.com/anakinj)) +- Proposal of simple JWK support [\#289](https://github.com/jwt/ruby-jwt/pull/289) ([anakinj](https://github.com/anakinj)) +- Add RSASSA-PSS signature signing support [\#285](https://github.com/jwt/ruby-jwt/pull/285) ([oliver-hohn](https://github.com/oliver-hohn)) +- Add note about using a hard coded algorithm in README [\#280](https://github.com/jwt/ruby-jwt/pull/280) ([revodoge](https://github.com/revodoge)) +- Add Appraisal support [\#278](https://github.com/jwt/ruby-jwt/pull/278) ([olbrich](https://github.com/olbrich)) +- Fix decode threading issue [\#269](https://github.com/jwt/ruby-jwt/pull/269) ([ab320012](https://github.com/ab320012)) +- Removed leeway from verify_iat [\#257](https://github.com/jwt/ruby-jwt/pull/257) ([ab320012](https://github.com/ab320012)) + +**Fixed bugs:** + +- Inconsistent handling of payload claim data types [\#282](https://github.com/jwt/ruby-jwt/issues/282) +- Issued at validation [\#247](https://github.com/jwt/ruby-jwt/issues/247) +- Fix bug and simplify segment validation [\#292](https://github.com/jwt/ruby-jwt/pull/292) ([anakinj](https://github.com/anakinj)) + +**Security fixes:** + +- Decoding JWT with ES256 and secp256k1 curve [\#277](https://github.com/jwt/ruby-jwt/issues/277) + +**Closed issues:** + +- RS256, public and private keys [\#291](https://github.com/jwt/ruby-jwt/issues/291) +- Allow passing current time to `decode` [\#288](https://github.com/jwt/ruby-jwt/issues/288) +- Verify exp claim without verifying jwt [\#281](https://github.com/jwt/ruby-jwt/issues/281) +- Audience as an array - how to specify? [\#276](https://github.com/jwt/ruby-jwt/issues/276) +- signature validation using decode method for JWT [\#271](https://github.com/jwt/ruby-jwt/issues/271) +- JWT is easily breakable [\#267](https://github.com/jwt/ruby-jwt/issues/267) +- Ruby JWT Token [\#265](https://github.com/jwt/ruby-jwt/issues/265) +- ECDSA supported algorithms constant is defined as a string, not an array [\#264](https://github.com/jwt/ruby-jwt/issues/264) +- NoMethodError: undefined method `group' for \ [\#261](https://github.com/jwt/ruby-jwt/issues/261) +- 'DecodeError'will replace 'ExpiredSignature' [\#260](https://github.com/jwt/ruby-jwt/issues/260) +- TypeError: no implicit conversion of OpenSSL::PKey::RSA into String [\#259](https://github.com/jwt/ruby-jwt/issues/259) +- NameError: uninitialized constant JWT::Algos::Eddsa::RbNaCl [\#258](https://github.com/jwt/ruby-jwt/issues/258) +- Get new token if curren token expired [\#256](https://github.com/jwt/ruby-jwt/issues/256) +- Infer algorithm from header [\#254](https://github.com/jwt/ruby-jwt/issues/254) +- Why is the result of decode is an array? 
[\#252](https://github.com/jwt/ruby-jwt/issues/252) +- Add support for headless token [\#251](https://github.com/jwt/ruby-jwt/issues/251) +- Leeway or exp_leeway [\#215](https://github.com/jwt/ruby-jwt/issues/215) +- Could you describe purpose of cert fixtures and their cryptokey lengths. [\#185](https://github.com/jwt/ruby-jwt/issues/185) + +**Merged pull requests:** + +- Release v2.2.0-beta.0 [\#302](https://github.com/jwt/ruby-jwt/pull/302) ([excpt](https://github.com/excpt)) +- Misc config improvements [\#296](https://github.com/jwt/ruby-jwt/pull/296) ([jamesstonehill](https://github.com/jamesstonehill)) +- Fix JSON conflict between \#293 and \#292 [\#294](https://github.com/jwt/ruby-jwt/pull/294) ([anakinj](https://github.com/anakinj)) +- Drop Ruby 2.2 from test matrix [\#290](https://github.com/jwt/ruby-jwt/pull/290) ([anakinj](https://github.com/anakinj)) +- Remove broken reek config [\#283](https://github.com/jwt/ruby-jwt/pull/283) ([excpt](https://github.com/excpt)) +- Add missing test, Update common files [\#275](https://github.com/jwt/ruby-jwt/pull/275) ([excpt](https://github.com/excpt)) +- Remove iat_leeway option [\#274](https://github.com/jwt/ruby-jwt/pull/274) ([wohlgejm](https://github.com/wohlgejm)) +- improving code quality of jwt module [\#266](https://github.com/jwt/ruby-jwt/pull/266) ([ab320012](https://github.com/ab320012)) +- fixed ECDSA supported versions const [\#263](https://github.com/jwt/ruby-jwt/pull/263) ([starbeast](https://github.com/starbeast)) +- Added my name to contributor list [\#262](https://github.com/jwt/ruby-jwt/pull/262) ([ab320012](https://github.com/ab320012)) +- Use `Class#new` Shorthand For Error Subclasses [\#255](https://github.com/jwt/ruby-jwt/pull/255) ([akabiru](https://github.com/akabiru)) +- \[CI\] Test against Ruby 2.5 [\#253](https://github.com/jwt/ruby-jwt/pull/253) ([nicolasleger](https://github.com/nicolasleger)) +- Fix README [\#250](https://github.com/jwt/ruby-jwt/pull/250) ([rono23](https://github.com/rono23)) +- Fix link format [\#248](https://github.com/jwt/ruby-jwt/pull/248) ([y-yagi](https://github.com/y-yagi)) + +## [v2.1.0](https://github.com/jwt/ruby-jwt/tree/v2.1.0) (2017-10-06) + +[Full Changelog](https://github.com/jwt/ruby-jwt/compare/v2.0.0...v2.1.0) + +**Implemented enhancements:** + +- Ed25519 support planned? 
[\#217](https://github.com/jwt/ruby-jwt/issues/217) +- Verify JTI Proc [\#207](https://github.com/jwt/ruby-jwt/issues/207) +- Allow a list of algorithms for decode [\#241](https://github.com/jwt/ruby-jwt/pull/241) ([lautis](https://github.com/lautis)) +- verify takes 2 params, second being payload closes: \#207 [\#238](https://github.com/jwt/ruby-jwt/pull/238) ([ab320012](https://github.com/ab320012)) +- simplified logic for keyfinder [\#237](https://github.com/jwt/ruby-jwt/pull/237) ([ab320012](https://github.com/ab320012)) +- Show backtrace if rbnacl-libsodium not loaded [\#231](https://github.com/jwt/ruby-jwt/pull/231) ([buzztaiki](https://github.com/buzztaiki)) +- Support for ED25519 [\#229](https://github.com/jwt/ruby-jwt/pull/229) ([ab320012](https://github.com/ab320012)) + +**Fixed bugs:** + +- JWT.encode failing on encode for string [\#235](https://github.com/jwt/ruby-jwt/issues/235) +- The README says it uses an algorithm by default [\#226](https://github.com/jwt/ruby-jwt/issues/226) +- Fix string payload issue [\#236](https://github.com/jwt/ruby-jwt/pull/236) ([excpt](https://github.com/excpt)) + +**Security fixes:** + +- Add HS256 algorithm to decode default options [\#228](https://github.com/jwt/ruby-jwt/pull/228) ([marcoadkins](https://github.com/marcoadkins)) + +**Closed issues:** + +- Change from 1.5.6 to 2.0.0 and appears a "Completed 401 Unauthorized" [\#240](https://github.com/jwt/ruby-jwt/issues/240) +- Why doesn't the decode function use a default algorithm? [\#227](https://github.com/jwt/ruby-jwt/issues/227) + +**Merged pull requests:** + +- Release 2.1.0 preparations [\#243](https://github.com/jwt/ruby-jwt/pull/243) ([excpt](https://github.com/excpt)) +- Update README.md [\#242](https://github.com/jwt/ruby-jwt/pull/242) ([excpt](https://github.com/excpt)) +- Update ebert configuration [\#232](https://github.com/jwt/ruby-jwt/pull/232) ([excpt](https://github.com/excpt)) +- added algos/strategy classes + structs for inputs [\#230](https://github.com/jwt/ruby-jwt/pull/230) ([ab320012](https://github.com/ab320012)) + +## [v2.0.0](https://github.com/jwt/ruby-jwt/tree/v2.0.0) (2017-09-03) + +[Full Changelog](https://github.com/jwt/ruby-jwt/compare/v2.0.0.beta1...v2.0.0) + +**Fixed bugs:** + +- Support versions outside 2.1 [\#209](https://github.com/jwt/ruby-jwt/issues/209) +- Verifying expiration without leeway throws exception [\#206](https://github.com/jwt/ruby-jwt/issues/206) +- Ruby interpreter warning [\#200](https://github.com/jwt/ruby-jwt/issues/200) +- TypeError: no implicit conversion of String into Integer [\#188](https://github.com/jwt/ruby-jwt/issues/188) +- Fix JWT.encode\(nil\) [\#203](https://github.com/jwt/ruby-jwt/pull/203) ([tmm1](https://github.com/tmm1)) + +**Closed issues:** + +- Possibility to disable claim verifications [\#222](https://github.com/jwt/ruby-jwt/issues/222) +- Proper way to verify Firebase id tokens [\#216](https://github.com/jwt/ruby-jwt/issues/216) + +**Merged pull requests:** + +- Release 2.0.0 preparations :\) [\#225](https://github.com/jwt/ruby-jwt/pull/225) ([excpt](https://github.com/excpt)) +- Skip 'exp' claim validation for array payloads [\#224](https://github.com/jwt/ruby-jwt/pull/224) ([excpt](https://github.com/excpt)) +- Use a default leeway of 0 [\#223](https://github.com/jwt/ruby-jwt/pull/223) ([travisofthenorth](https://github.com/travisofthenorth)) +- Fix reported codesmells [\#221](https://github.com/jwt/ruby-jwt/pull/221) ([excpt](https://github.com/excpt)) +- Add fancy gem version badge 
[\#220](https://github.com/jwt/ruby-jwt/pull/220) ([excpt](https://github.com/excpt)) +- Add missing dist option to .travis.yml [\#219](https://github.com/jwt/ruby-jwt/pull/219) ([excpt](https://github.com/excpt)) +- Fix ruby version requirements in gemspec file [\#218](https://github.com/jwt/ruby-jwt/pull/218) ([excpt](https://github.com/excpt)) +- Fix a little typo in the readme [\#214](https://github.com/jwt/ruby-jwt/pull/214) ([RyanBrushett](https://github.com/RyanBrushett)) +- Update README.md [\#212](https://github.com/jwt/ruby-jwt/pull/212) ([zuzannast](https://github.com/zuzannast)) +- Fix typo in HS512256 algorithm description [\#211](https://github.com/jwt/ruby-jwt/pull/211) ([ojab](https://github.com/ojab)) +- Allow configuration of multiple acceptable issuers [\#210](https://github.com/jwt/ruby-jwt/pull/210) ([ojab](https://github.com/ojab)) +- Enforce `exp` to be an `Integer` [\#205](https://github.com/jwt/ruby-jwt/pull/205) ([lucasmazza](https://github.com/lucasmazza)) +- ruby 1.9.3 support message upd [\#204](https://github.com/jwt/ruby-jwt/pull/204) ([maokomioko](https://github.com/maokomioko)) + +## [v2.0.0.beta1](https://github.com/jwt/ruby-jwt/tree/v2.0.0.beta1) (2017-02-27) + +[Full Changelog](https://github.com/jwt/ruby-jwt/compare/v1.5.6...v2.0.0.beta1) + +**Implemented enhancements:** + +- Error with method sign for String [\#171](https://github.com/jwt/ruby-jwt/issues/171) +- Refactor the encondig code [\#121](https://github.com/jwt/ruby-jwt/issues/121) +- Refactor [\#196](https://github.com/jwt/ruby-jwt/pull/196) ([EmilioCristalli](https://github.com/EmilioCristalli)) +- Move signature logic to its own module [\#195](https://github.com/jwt/ruby-jwt/pull/195) ([EmilioCristalli](https://github.com/EmilioCristalli)) +- Add options for claim-specific leeway [\#187](https://github.com/jwt/ruby-jwt/pull/187) ([EmilioCristalli](https://github.com/EmilioCristalli)) +- Add user friendly encode error if private key is a String, \#171 [\#176](https://github.com/jwt/ruby-jwt/pull/176) ([ogonki-vetochki](https://github.com/ogonki-vetochki)) +- Return empty string if signature less than byte_size \#155 [\#175](https://github.com/jwt/ruby-jwt/pull/175) ([ogonki-vetochki](https://github.com/ogonki-vetochki)) +- Remove 'typ' optional parameter [\#174](https://github.com/jwt/ruby-jwt/pull/174) ([ogonki-vetochki](https://github.com/ogonki-vetochki)) +- Pass payload to keyfinder [\#172](https://github.com/jwt/ruby-jwt/pull/172) ([CodeMonkeySteve](https://github.com/CodeMonkeySteve)) +- Use RbNaCl for HMAC if available with fallback to OpenSSL [\#149](https://github.com/jwt/ruby-jwt/pull/149) ([mwpastore](https://github.com/mwpastore)) + +**Fixed bugs:** + +- ruby-jwt::raw_to_asn1: Fails for signatures less than byte_size [\#155](https://github.com/jwt/ruby-jwt/issues/155) +- The leeway parameter is applies to all time based verifications [\#129](https://github.com/jwt/ruby-jwt/issues/129) +- Make algorithm option required to verify signature [\#184](https://github.com/jwt/ruby-jwt/pull/184) ([EmilioCristalli](https://github.com/EmilioCristalli)) +- Validate audience when payload is a scalar and options is an array [\#183](https://github.com/jwt/ruby-jwt/pull/183) ([steti](https://github.com/steti)) + +**Closed issues:** + +- Different encoded value between servers with same password [\#197](https://github.com/jwt/ruby-jwt/issues/197) +- Signature is different at each run [\#190](https://github.com/jwt/ruby-jwt/issues/190) +- Include custom headers with password 
[\#189](https://github.com/jwt/ruby-jwt/issues/189) +- can't create token - 'NotImplementedError: Unsupported signing method' [\#186](https://github.com/jwt/ruby-jwt/issues/186) +- Cannot verify JWT at all?? [\#177](https://github.com/jwt/ruby-jwt/issues/177) +- verify_iss: true is raising JWT::DecodeError instead of JWT::InvalidIssuerError [\#170](https://github.com/jwt/ruby-jwt/issues/170) + +**Merged pull requests:** + +- Version bump 2.0.0.beta1 [\#199](https://github.com/jwt/ruby-jwt/pull/199) ([excpt](https://github.com/excpt)) +- Update CHANGELOG.md and minor fixes [\#198](https://github.com/jwt/ruby-jwt/pull/198) ([excpt](https://github.com/excpt)) +- Add Codacy coverage reporter [\#194](https://github.com/jwt/ruby-jwt/pull/194) ([excpt](https://github.com/excpt)) +- Add minimum required ruby version to gemspec [\#193](https://github.com/jwt/ruby-jwt/pull/193) ([excpt](https://github.com/excpt)) +- Code smell fixes [\#192](https://github.com/jwt/ruby-jwt/pull/192) ([excpt](https://github.com/excpt)) +- Version bump to 2.0.0.dev [\#191](https://github.com/jwt/ruby-jwt/pull/191) ([excpt](https://github.com/excpt)) +- Basic encode module refactoring \#121 [\#182](https://github.com/jwt/ruby-jwt/pull/182) ([ogonki-vetochki](https://github.com/ogonki-vetochki)) +- Fix travis ci build configuration [\#181](https://github.com/jwt/ruby-jwt/pull/181) ([excpt](https://github.com/excpt)) +- Fix travis ci build configuration [\#180](https://github.com/jwt/ruby-jwt/pull/180) ([excpt](https://github.com/excpt)) +- Fix typo in README [\#178](https://github.com/jwt/ruby-jwt/pull/178) ([tomeduarte](https://github.com/tomeduarte)) +- Fix code style [\#173](https://github.com/jwt/ruby-jwt/pull/173) ([excpt](https://github.com/excpt)) +- Fixed a typo in a spec name [\#169](https://github.com/jwt/ruby-jwt/pull/169) ([mingan](https://github.com/mingan)) + +## [v1.5.6](https://github.com/jwt/ruby-jwt/tree/v1.5.6) (2016-09-19) + +[Full Changelog](https://github.com/jwt/ruby-jwt/compare/v1.5.5...v1.5.6) + +**Fixed bugs:** + +- Fix missing symbol handling in aud verify code [\#166](https://github.com/jwt/ruby-jwt/pull/166) ([excpt](https://github.com/excpt)) + +**Merged pull requests:** + +- Update changelog [\#168](https://github.com/jwt/ruby-jwt/pull/168) ([excpt](https://github.com/excpt)) +- Fix rubocop code smells [\#167](https://github.com/jwt/ruby-jwt/pull/167) ([excpt](https://github.com/excpt)) + +## [v1.5.5](https://github.com/jwt/ruby-jwt/tree/v1.5.5) (2016-09-16) + +[Full Changelog](https://github.com/jwt/ruby-jwt/compare/v1.5.4...v1.5.5) + +**Implemented enhancements:** + +- JWT.decode always raises JWT::ExpiredSignature for tokens created with Time objects passed as the `exp` parameter [\#148](https://github.com/jwt/ruby-jwt/issues/148) + +**Fixed bugs:** + +- expiration check does not give "Signature has expired" error for the exact time of expiration [\#157](https://github.com/jwt/ruby-jwt/issues/157) +- JTI claim broken? [\#152](https://github.com/jwt/ruby-jwt/issues/152) +- Audience Claim broken? 
[\#151](https://github.com/jwt/ruby-jwt/issues/151) +- 1.5.3 breaks compatibility with 1.5.2 [\#133](https://github.com/jwt/ruby-jwt/issues/133) +- Version 1.5.3 breaks 1.9.3 compatibility, but not documented as such [\#132](https://github.com/jwt/ruby-jwt/issues/132) +- Fix: exp claim check [\#161](https://github.com/jwt/ruby-jwt/pull/161) ([excpt](https://github.com/excpt)) + +**Security fixes:** + +- \[security\] Signature verified after expiration/sub/iss checks [\#153](https://github.com/jwt/ruby-jwt/issues/153) +- Signature validation before claim verification [\#160](https://github.com/jwt/ruby-jwt/pull/160) ([excpt](https://github.com/excpt)) + +**Closed issues:** + +- Rendering Json Results in JWT::DecodeError [\#162](https://github.com/jwt/ruby-jwt/issues/162) +- PHP Libraries [\#154](https://github.com/jwt/ruby-jwt/issues/154) +- Is ruby-jwt thread-safe? [\#150](https://github.com/jwt/ruby-jwt/issues/150) +- JWT 1.5.3 [\#143](https://github.com/jwt/ruby-jwt/issues/143) +- gem install v 1.5.3 returns error [\#141](https://github.com/jwt/ruby-jwt/issues/141) +- Adding a CHANGELOG [\#140](https://github.com/jwt/ruby-jwt/issues/140) + +**Merged pull requests:** + +- Bump version [\#165](https://github.com/jwt/ruby-jwt/pull/165) ([excpt](https://github.com/excpt)) +- Improve error message for exp claim in payload [\#164](https://github.com/jwt/ruby-jwt/pull/164) ([excpt](https://github.com/excpt)) +- Fix \#151 and code refactoring [\#163](https://github.com/jwt/ruby-jwt/pull/163) ([excpt](https://github.com/excpt)) +- Create specs for README.md examples [\#159](https://github.com/jwt/ruby-jwt/pull/159) ([excpt](https://github.com/excpt)) +- Tiny Readme Improvement [\#156](https://github.com/jwt/ruby-jwt/pull/156) ([b264](https://github.com/b264)) +- Added test execution to Rakefile [\#147](https://github.com/jwt/ruby-jwt/pull/147) ([jabbrwcky](https://github.com/jabbrwcky)) +- Bump version [\#145](https://github.com/jwt/ruby-jwt/pull/145) ([excpt](https://github.com/excpt)) +- Add a changelog file [\#142](https://github.com/jwt/ruby-jwt/pull/142) ([excpt](https://github.com/excpt)) +- Return decoded_segments [\#139](https://github.com/jwt/ruby-jwt/pull/139) ([akostrikov](https://github.com/akostrikov)) + +## [v1.5.4](https://github.com/jwt/ruby-jwt/tree/v1.5.4) (2016-03-24) + +[Full Changelog](https://github.com/jwt/ruby-jwt/compare/v1.5.3...v1.5.4) + +**Closed issues:** + +- 404 at [https://rubygems.global.ssl.fastly.net/gems/jwt-1.5.3.gem](https://rubygems.global.ssl.fastly.net/gems/jwt-1.5.3.gem) [\#137](https://github.com/jwt/ruby-jwt/issues/137) + +**Merged pull requests:** + +- Update README.md [\#138](https://github.com/jwt/ruby-jwt/pull/138) ([excpt](https://github.com/excpt)) +- Fix base64url_decode [\#136](https://github.com/jwt/ruby-jwt/pull/136) ([excpt](https://github.com/excpt)) +- Fix ruby 1.9.3 compatibility [\#135](https://github.com/jwt/ruby-jwt/pull/135) ([excpt](https://github.com/excpt)) +- iat can be a float value [\#134](https://github.com/jwt/ruby-jwt/pull/134) ([llimllib](https://github.com/llimllib)) + +## [v1.5.3](https://github.com/jwt/ruby-jwt/tree/v1.5.3) (2016-02-24) + +[Full Changelog](https://github.com/jwt/ruby-jwt/compare/jwt-1.5.2...v1.5.3) + +**Implemented enhancements:** + +- Refactor obsolete code for ruby 1.8 support [\#120](https://github.com/jwt/ruby-jwt/issues/120) +- Fix "Rubocop/Metrics/CyclomaticComplexity" issue in lib/jwt.rb [\#106](https://github.com/jwt/ruby-jwt/issues/106) +- Fix "Rubocop/Metrics/CyclomaticComplexity" issue in 
lib/jwt.rb [\#105](https://github.com/jwt/ruby-jwt/issues/105) +- Allow a proc to be passed for JTI verification [\#126](https://github.com/jwt/ruby-jwt/pull/126) ([yahooguntu](https://github.com/yahooguntu)) +- Relax restrictions on "jti" claim verification [\#113](https://github.com/jwt/ruby-jwt/pull/113) ([lwe](https://github.com/lwe)) + +**Closed issues:** + +- Verifications not functioning in latest release [\#128](https://github.com/jwt/ruby-jwt/issues/128) +- Base64 is generating invalid length base64 strings - cross language interop [\#127](https://github.com/jwt/ruby-jwt/issues/127) +- Digest::Digest is deprecated; use Digest [\#119](https://github.com/jwt/ruby-jwt/issues/119) +- verify_rsa no method 'verify' for class String [\#115](https://github.com/jwt/ruby-jwt/issues/115) +- Add a changelog [\#111](https://github.com/jwt/ruby-jwt/issues/111) + +**Merged pull requests:** + +- Drop ruby 1.9.3 support [\#131](https://github.com/jwt/ruby-jwt/pull/131) ([excpt](https://github.com/excpt)) +- Allow string hash keys in validation configurations [\#130](https://github.com/jwt/ruby-jwt/pull/130) ([tpickett66](https://github.com/tpickett66)) +- Add ruby 2.3.0 for travis ci testing [\#123](https://github.com/jwt/ruby-jwt/pull/123) ([excpt](https://github.com/excpt)) +- Remove obsolete json code [\#122](https://github.com/jwt/ruby-jwt/pull/122) ([excpt](https://github.com/excpt)) +- Add fancy badges to README.md [\#118](https://github.com/jwt/ruby-jwt/pull/118) ([excpt](https://github.com/excpt)) +- Refactor decode and verify functionality [\#117](https://github.com/jwt/ruby-jwt/pull/117) ([excpt](https://github.com/excpt)) +- Drop echoe dependency for gem releases [\#116](https://github.com/jwt/ruby-jwt/pull/116) ([excpt](https://github.com/excpt)) +- Updated readme for iss/aud options [\#114](https://github.com/jwt/ruby-jwt/pull/114) ([ryanmcilmoyl](https://github.com/ryanmcilmoyl)) +- Fix error misspelling [\#112](https://github.com/jwt/ruby-jwt/pull/112) ([kat3kasper](https://github.com/kat3kasper)) + +## [jwt-1.5.2](https://github.com/jwt/ruby-jwt/tree/jwt-1.5.2) (2015-10-27) + +[Full Changelog](https://github.com/jwt/ruby-jwt/compare/jwt-1.5.1...jwt-1.5.2) + +**Implemented enhancements:** + +- Must we specify algorithm when calling decode to avoid vulnerabilities? [\#107](https://github.com/jwt/ruby-jwt/issues/107) +- Code review: Rspec test refactoring [\#85](https://github.com/jwt/ruby-jwt/pull/85) ([excpt](https://github.com/excpt)) + +**Fixed bugs:** + +- aud verifies if aud is passed in, :sub does not [\#102](https://github.com/jwt/ruby-jwt/issues/102) +- iat check does not use leeway so nbf could pass, but iat fail [\#83](https://github.com/jwt/ruby-jwt/issues/83) + +**Closed issues:** + +- Test ticket from Code Climate [\#104](https://github.com/jwt/ruby-jwt/issues/104) +- Test ticket from Code Climate [\#100](https://github.com/jwt/ruby-jwt/issues/100) +- Is it possible to decode the payload without validating the signature? [\#97](https://github.com/jwt/ruby-jwt/issues/97) +- What is audience? [\#96](https://github.com/jwt/ruby-jwt/issues/96) +- Options hash uses both symbols and strings as keys. [\#95](https://github.com/jwt/ruby-jwt/issues/95) + +**Merged pull requests:** + +- Fix incorrect `iat` examples [\#109](https://github.com/jwt/ruby-jwt/pull/109) ([kjwierenga](https://github.com/kjwierenga)) +- Update docs to include instructions for the algorithm parameter. 
[\#108](https://github.com/jwt/ruby-jwt/pull/108) ([aarongray](https://github.com/aarongray)) +- make sure :sub check behaves like :aud check [\#103](https://github.com/jwt/ruby-jwt/pull/103) ([skippy](https://github.com/skippy)) +- Change hash syntax [\#101](https://github.com/jwt/ruby-jwt/pull/101) ([excpt](https://github.com/excpt)) +- Include LICENSE and README.md in gem [\#99](https://github.com/jwt/ruby-jwt/pull/99) ([bkeepers](https://github.com/bkeepers)) +- Remove unused variable in the sample code. [\#98](https://github.com/jwt/ruby-jwt/pull/98) ([hypermkt](https://github.com/hypermkt)) +- Fix iat claim example [\#94](https://github.com/jwt/ruby-jwt/pull/94) ([larrylv](https://github.com/larrylv)) +- Fix wrong description in README.md [\#93](https://github.com/jwt/ruby-jwt/pull/93) ([larrylv](https://github.com/larrylv)) +- JWT and JWA are now RFC. [\#92](https://github.com/jwt/ruby-jwt/pull/92) ([aj-michael](https://github.com/aj-michael)) +- Update README.md [\#91](https://github.com/jwt/ruby-jwt/pull/91) ([nsarno](https://github.com/nsarno)) +- Fix missing verify parameter in docs [\#90](https://github.com/jwt/ruby-jwt/pull/90) ([ernie](https://github.com/ernie)) +- Iat check uses leeway. [\#89](https://github.com/jwt/ruby-jwt/pull/89) ([aj-michael](https://github.com/aj-michael)) +- nbf check allows exact time matches. [\#88](https://github.com/jwt/ruby-jwt/pull/88) ([aj-michael](https://github.com/aj-michael)) + +## [jwt-1.5.1](https://github.com/jwt/ruby-jwt/tree/jwt-1.5.1) (2015-06-22) + +[Full Changelog](https://github.com/jwt/ruby-jwt/compare/jwt-1.5.0...jwt-1.5.1) + +**Implemented enhancements:** + +- Fix either README or source code [\#78](https://github.com/jwt/ruby-jwt/issues/78) +- Validate against draft 20 [\#38](https://github.com/jwt/ruby-jwt/issues/38) + +**Fixed bugs:** + +- ECDSA signature verification fails for valid tokens [\#84](https://github.com/jwt/ruby-jwt/issues/84) +- Shouldn't verification of additional claims, like iss, aud etc. be enforced when in options? 
[\#81](https://github.com/jwt/ruby-jwt/issues/81) +- decode fails with 'none' algorithm and verify [\#75](https://github.com/jwt/ruby-jwt/issues/75) + +**Closed issues:** + +- Doc mismatch: uninitialized constant JWT::ExpiredSignature [\#79](https://github.com/jwt/ruby-jwt/issues/79) +- TypeError when specifying a wrong algorithm [\#77](https://github.com/jwt/ruby-jwt/issues/77) +- jti verification doesn't prevent replays [\#73](https://github.com/jwt/ruby-jwt/issues/73) + +**Merged pull requests:** + +- Correctly sign ECDSA JWTs [\#87](https://github.com/jwt/ruby-jwt/pull/87) ([jurriaan](https://github.com/jurriaan)) +- fixed results of decoded tokens in readme [\#86](https://github.com/jwt/ruby-jwt/pull/86) ([piscolomo](https://github.com/piscolomo)) +- Force verification of "iss" and "aud" claims [\#82](https://github.com/jwt/ruby-jwt/pull/82) ([lwe](https://github.com/lwe)) + +## [jwt-1.5.0](https://github.com/jwt/ruby-jwt/tree/jwt-1.5.0) (2015-05-09) + +[Full Changelog](https://github.com/jwt/ruby-jwt/compare/jwt-1.4.1...jwt-1.5.0) + +**Implemented enhancements:** + +- Needs to support asymmetric key signatures over shared secrets [\#46](https://github.com/jwt/ruby-jwt/issues/46) +- Implement Elliptic Curve Crypto Signatures [\#74](https://github.com/jwt/ruby-jwt/pull/74) ([jtdowney](https://github.com/jtdowney)) +- Add an option to verify the signature on decode [\#71](https://github.com/jwt/ruby-jwt/pull/71) ([javawizard](https://github.com/javawizard)) + +**Closed issues:** + +- Check JWT vulnerability [\#76](https://github.com/jwt/ruby-jwt/issues/76) + +**Merged pull requests:** + +- Fixed some examples to make them copy-pastable [\#72](https://github.com/jwt/ruby-jwt/pull/72) ([jer](https://github.com/jer)) + +## [jwt-1.4.1](https://github.com/jwt/ruby-jwt/tree/jwt-1.4.1) (2015-03-12) + +[Full Changelog](https://github.com/jwt/ruby-jwt/compare/jwt-1.4.0...jwt-1.4.1) + +**Fixed bugs:** + +- jti verification not working per the spec [\#68](https://github.com/jwt/ruby-jwt/issues/68) +- Verify ISS should be off by default [\#66](https://github.com/jwt/ruby-jwt/issues/66) + +**Merged pull requests:** + +- Fix \#66 \#68 [\#69](https://github.com/jwt/ruby-jwt/pull/69) ([excpt](https://github.com/excpt)) +- When throwing errors, mention expected/received values [\#65](https://github.com/jwt/ruby-jwt/pull/65) ([rolodato](https://github.com/rolodato)) + +## [jwt-1.4.0](https://github.com/jwt/ruby-jwt/tree/jwt-1.4.0) (2015-03-10) + +[Full Changelog](https://github.com/jwt/ruby-jwt/compare/jwt-1.3.0...jwt-1.4.0) + +**Closed issues:** + +- The behavior using 'json' differs from 'multi_json' [\#41](https://github.com/jwt/ruby-jwt/issues/41) + +**Merged pull requests:** + +- Release 1.4.0 [\#64](https://github.com/jwt/ruby-jwt/pull/64) ([excpt](https://github.com/excpt)) +- Update README.md and remove dead code [\#63](https://github.com/jwt/ruby-jwt/pull/63) ([excpt](https://github.com/excpt)) +- Add 'iat/ aud/ sub/ jti' support for ruby-jwt [\#62](https://github.com/jwt/ruby-jwt/pull/62) ([ZhangHanDong](https://github.com/ZhangHanDong)) +- Add 'iss' support for ruby-jwt [\#61](https://github.com/jwt/ruby-jwt/pull/61) ([ZhangHanDong](https://github.com/ZhangHanDong)) +- Clarify .encode API in README [\#60](https://github.com/jwt/ruby-jwt/pull/60) ([jbodah](https://github.com/jbodah)) + +## [jwt-1.3.0](https://github.com/jwt/ruby-jwt/tree/jwt-1.3.0) (2015-02-24) + +[Full Changelog](https://github.com/jwt/ruby-jwt/compare/jwt-1.2.1...jwt-1.3.0) + +**Closed issues:** + +- Signature Verification to 
Return Verification Error rather than decode error [\#57](https://github.com/jwt/ruby-jwt/issues/57) +- Incorrect readme for leeway [\#55](https://github.com/jwt/ruby-jwt/issues/55) +- What is the reason behind stripping the = in base64 encoding? [\#54](https://github.com/jwt/ruby-jwt/issues/54) +- Preperations for version 2.x [\#50](https://github.com/jwt/ruby-jwt/issues/50) +- Release a new version [\#47](https://github.com/jwt/ruby-jwt/issues/47) +- Catch up for ActiveWhatever 4.1.1 series [\#40](https://github.com/jwt/ruby-jwt/issues/40) + +**Merged pull requests:** + +- raise verification error for signiture verification [\#58](https://github.com/jwt/ruby-jwt/pull/58) ([punkle](https://github.com/punkle)) +- Added support for not before claim verification [\#56](https://github.com/jwt/ruby-jwt/pull/56) ([punkle](https://github.com/punkle)) + +## [jwt-1.2.1](https://github.com/jwt/ruby-jwt/tree/jwt-1.2.1) (2015-01-22) + +[Full Changelog](https://github.com/jwt/ruby-jwt/compare/jwt-1.2.0...jwt-1.2.1) + +**Closed issues:** + +- JWT.encode\({"exp": 10}, "secret"\) [\#52](https://github.com/jwt/ruby-jwt/issues/52) +- JWT.encode\({"exp": 10}, "secret"\) [\#51](https://github.com/jwt/ruby-jwt/issues/51) + +**Merged pull requests:** + +- Accept expiration claims as string [\#53](https://github.com/jwt/ruby-jwt/pull/53) ([yarmand](https://github.com/yarmand)) + +## [jwt-1.2.0](https://github.com/jwt/ruby-jwt/tree/jwt-1.2.0) (2014-11-24) + +[Full Changelog](https://github.com/jwt/ruby-jwt/compare/jwt-0.1.13...jwt-1.2.0) + +**Closed issues:** + +- set token to expire [\#42](https://github.com/jwt/ruby-jwt/issues/42) + +**Merged pull requests:** + +- Added support for `exp` claim [\#45](https://github.com/jwt/ruby-jwt/pull/45) ([zshannon](https://github.com/zshannon)) +- rspec 3 breaks passing tests [\#44](https://github.com/jwt/ruby-jwt/pull/44) ([zshannon](https://github.com/zshannon)) + +## [jwt-0.1.13](https://github.com/jwt/ruby-jwt/tree/jwt-0.1.13) (2014-05-08) + +[Full Changelog](https://github.com/jwt/ruby-jwt/compare/jwt-1.0.0...jwt-0.1.13) + +**Closed issues:** + +- yanking of version 0.1.12 causes issues [\#39](https://github.com/jwt/ruby-jwt/issues/39) +- Semantic versioning [\#37](https://github.com/jwt/ruby-jwt/issues/37) +- Update gem to get latest changes [\#36](https://github.com/jwt/ruby-jwt/issues/36) + +## [jwt-1.0.0](https://github.com/jwt/ruby-jwt/tree/jwt-1.0.0) (2014-05-07) + +[Full Changelog](https://github.com/jwt/ruby-jwt/compare/jwt-0.1.11...jwt-1.0.0) + +**Closed issues:** + +- API request - JWT::decoded_header\(\) [\#26](https://github.com/jwt/ruby-jwt/issues/26) + +**Merged pull requests:** + +- return header along with playload after decoding [\#35](https://github.com/jwt/ruby-jwt/pull/35) ([sawyerzhang](https://github.com/sawyerzhang)) +- Raise JWT::DecodeError on nil token [\#34](https://github.com/jwt/ruby-jwt/pull/34) ([tjmw](https://github.com/tjmw)) +- Make MultiJson optional for Ruby 1.9+ [\#33](https://github.com/jwt/ruby-jwt/pull/33) ([petergoldstein](https://github.com/petergoldstein)) +- Allow access to header and payload without signature verification [\#32](https://github.com/jwt/ruby-jwt/pull/32) ([petergoldstein](https://github.com/petergoldstein)) +- Update specs to use RSpec 3.0.x syntax [\#31](https://github.com/jwt/ruby-jwt/pull/31) ([petergoldstein](https://github.com/petergoldstein)) +- Travis - Add Ruby 2.0.0, 2.1.0, Rubinius [\#30](https://github.com/jwt/ruby-jwt/pull/30) ([petergoldstein](https://github.com/petergoldstein)) + +## 
[jwt-0.1.11](https://github.com/jwt/ruby-jwt/tree/jwt-0.1.11) (2014-01-17) + +[Full Changelog](https://github.com/jwt/ruby-jwt/compare/jwt-0.1.10...jwt-0.1.11) + +**Closed issues:** + +- url safe encode and decode [\#28](https://github.com/jwt/ruby-jwt/issues/28) +- Release [\#27](https://github.com/jwt/ruby-jwt/issues/27) + +**Merged pull requests:** + +- fixed urlsafe base64 encoding [\#29](https://github.com/jwt/ruby-jwt/pull/29) ([tobscher](https://github.com/tobscher)) + +## [jwt-0.1.10](https://github.com/jwt/ruby-jwt/tree/jwt-0.1.10) (2014-01-10) + +[Full Changelog](https://github.com/jwt/ruby-jwt/compare/jwt-0.1.8...jwt-0.1.10) + +**Closed issues:** + +- change to signature of JWT.decode method [\#14](https://github.com/jwt/ruby-jwt/issues/14) + +**Merged pull requests:** + +- Fix warning: assigned but unused variable - e [\#25](https://github.com/jwt/ruby-jwt/pull/25) ([sferik](https://github.com/sferik)) +- Echoe doesn't define a license= method [\#24](https://github.com/jwt/ruby-jwt/pull/24) ([sferik](https://github.com/sferik)) +- Use OpenSSL::Digest instead of deprecated OpenSSL::Digest::Digest [\#23](https://github.com/jwt/ruby-jwt/pull/23) ([JuanitoFatas](https://github.com/JuanitoFatas)) +- Handle some invalid JWTs [\#22](https://github.com/jwt/ruby-jwt/pull/22) ([steved](https://github.com/steved)) +- Add MIT license to gemspec [\#21](https://github.com/jwt/ruby-jwt/pull/21) ([nycvotes-dev](https://github.com/nycvotes-dev)) +- Tweaks and improvements [\#20](https://github.com/jwt/ruby-jwt/pull/20) ([threedaymonk](https://github.com/threedaymonk)) +- Don't leave errors in OpenSSL.errors when there is a decoding error. [\#19](https://github.com/jwt/ruby-jwt/pull/19) ([lowellk](https://github.com/lowellk)) + +## [jwt-0.1.8](https://github.com/jwt/ruby-jwt/tree/jwt-0.1.8) (2013-03-14) + +[Full Changelog](https://github.com/jwt/ruby-jwt/compare/jwt-0.1.7...jwt-0.1.8) + +**Merged pull requests:** + +- Contrib and update [\#18](https://github.com/jwt/ruby-jwt/pull/18) ([threedaymonk](https://github.com/threedaymonk)) +- Verify if verify is truthy \(not just true\) [\#17](https://github.com/jwt/ruby-jwt/pull/17) ([threedaymonk](https://github.com/threedaymonk)) + +## [jwt-0.1.7](https://github.com/jwt/ruby-jwt/tree/jwt-0.1.7) (2013-03-07) + +[Full Changelog](https://github.com/jwt/ruby-jwt/compare/jwt-0.1.6...jwt-0.1.7) + +**Merged pull requests:** + +- Catch MultiJson::LoadError and reraise as JWT::DecodeError [\#16](https://github.com/jwt/ruby-jwt/pull/16) ([rwygand](https://github.com/rwygand)) + +## [jwt-0.1.6](https://github.com/jwt/ruby-jwt/tree/jwt-0.1.6) (2013-03-05) + +[Full Changelog](https://github.com/jwt/ruby-jwt/compare/jwt-0.1.5...jwt-0.1.6) + +**Merged pull requests:** + +- Fixes a theoretical timing attack [\#15](https://github.com/jwt/ruby-jwt/pull/15) ([mgates](https://github.com/mgates)) +- Use StandardError as parent for DecodeError [\#13](https://github.com/jwt/ruby-jwt/pull/13) ([Oscil8](https://github.com/Oscil8)) + +## [jwt-0.1.5](https://github.com/jwt/ruby-jwt/tree/jwt-0.1.5) (2012-07-20) + +[Full Changelog](https://github.com/jwt/ruby-jwt/compare/jwt-0.1.4...jwt-0.1.5) + +**Closed issues:** + +- Unable to specify signature header fields [\#7](https://github.com/jwt/ruby-jwt/issues/7) + +**Merged pull requests:** + +- MultiJson dependency uses ~\> but should be \>= [\#12](https://github.com/jwt/ruby-jwt/pull/12) ([sporkmonger](https://github.com/sporkmonger)) +- Oops. 
:-\) [\#11](https://github.com/jwt/ruby-jwt/pull/11) ([sporkmonger](https://github.com/sporkmonger)) +- Fix issue with signature verification in JRuby [\#10](https://github.com/jwt/ruby-jwt/pull/10) ([sporkmonger](https://github.com/sporkmonger)) +- Depend on MultiJson [\#9](https://github.com/jwt/ruby-jwt/pull/9) ([lautis](https://github.com/lautis)) +- Allow for custom headers on encode and decode [\#8](https://github.com/jwt/ruby-jwt/pull/8) ([dgrijalva](https://github.com/dgrijalva)) +- Missing development dependency for echoe gem. [\#6](https://github.com/jwt/ruby-jwt/pull/6) ([sporkmonger](https://github.com/sporkmonger)) + +## [jwt-0.1.4](https://github.com/jwt/ruby-jwt/tree/jwt-0.1.4) (2011-11-11) + +[Full Changelog](https://github.com/jwt/ruby-jwt/compare/jwt-0.1.3...jwt-0.1.4) + +**Merged pull requests:** + +- Fix for RSA verification [\#5](https://github.com/jwt/ruby-jwt/pull/5) ([jordan-brough](https://github.com/jordan-brough)) + +## [jwt-0.1.3](https://github.com/jwt/ruby-jwt/tree/jwt-0.1.3) (2011-06-30) + +[Full Changelog](https://github.com/jwt/ruby-jwt/compare/10d7492ea325c65fce41191c73cd90d4de494772...jwt-0.1.3) + +**Closed issues:** + +- signatures calculated incorrectly \(hexdigest instead of digest\) [\#1](https://github.com/jwt/ruby-jwt/issues/1) + +**Merged pull requests:** + +- Bumped a version and added a .gemspec using rake build_gemspec [\#3](https://github.com/jwt/ruby-jwt/pull/3) ([zhitomirskiyi](https://github.com/zhitomirskiyi)) +- Added RSA support [\#2](https://github.com/jwt/ruby-jwt/pull/2) ([zhitomirskiyi](https://github.com/zhitomirskiyi)) diff --git a/vendor/bundle/ruby/3.2.0/gems/jwt-3.1.2/CODE_OF_CONDUCT.md b/vendor/bundle/ruby/3.2.0/gems/jwt-3.1.2/CODE_OF_CONDUCT.md new file mode 100644 index 000000000..1d65f7aec --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/jwt-3.1.2/CODE_OF_CONDUCT.md @@ -0,0 +1,84 @@ +# Contributor Covenant Code of Conduct + +## Our Pledge + +We as members, contributors, and leaders pledge to make participation in our community a harassment-free experience for everyone, regardless of age, body size, visible or invisible disability, ethnicity, sex characteristics, gender identity and expression, level of experience, education, socio-economic status, nationality, personal appearance, race, religion, or sexual identity and orientation. + +We pledge to act and interact in ways that contribute to an open, welcoming, diverse, inclusive, and healthy community. 
+ +## Our Standards + +Examples of behavior that contributes to a positive environment for our community include: + +- Demonstrating empathy and kindness toward other people +- Being respectful of differing opinions, viewpoints, and experiences +- Giving and gracefully accepting constructive feedback +- Accepting responsibility and apologizing to those affected by our mistakes, and learning from the experience +- Focusing on what is best not just for us as individuals, but for the overall community + +Examples of unacceptable behavior include: + +- The use of sexualized language or imagery, and sexual attention or + advances of any kind +- Trolling, insulting or derogatory comments, and personal or political attacks +- Public or private harassment +- Publishing others' private information, such as a physical or email + address, without their explicit permission +- Other conduct which could reasonably be considered inappropriate in a + professional setting + +## Enforcement Responsibilities + +Community leaders are responsible for clarifying and enforcing our standards of acceptable behavior and will take appropriate and fair corrective action in response to any behavior that they deem inappropriate, threatening, offensive, or harmful. + +Community leaders have the right and responsibility to remove, edit, or reject comments, commits, code, wiki edits, issues, and other contributions that are not aligned to this Code of Conduct, and will communicate reasons for moderation decisions when appropriate. + +## Scope + +This Code of Conduct applies within all community spaces, and also applies when an individual is officially representing the community in public spaces. Examples of representing our community include using an official e-mail address, posting via an official social media account, or acting as an appointed representative at an online or offline event. + +## Enforcement + +Instances of abusive, harassing, or otherwise unacceptable behavior may be reported to the community leaders responsible for enforcement at . All complaints will be reviewed and investigated promptly and fairly. + +All community leaders are obligated to respect the privacy and security of the reporter of any incident. + +## Enforcement Guidelines + +Community leaders will follow these Community Impact Guidelines in determining the consequences for any action they deem in violation of this Code of Conduct: + +### 1. Correction + +**Community Impact**: Use of inappropriate language or other behavior deemed unprofessional or unwelcome in the community. + +**Consequence**: A private, written warning from community leaders, providing clarity around the nature of the violation and an explanation of why the behavior was inappropriate. A public apology may be requested. + +### 2. Warning + +**Community Impact**: A violation through a single incident or series of actions. + +**Consequence**: A warning with consequences for continued behavior. No interaction with the people involved, including unsolicited interaction with those enforcing the Code of Conduct, for a specified period of time. This includes avoiding interactions in community spaces as well as external channels like social media. Violating these terms may lead to a temporary or permanent ban. + +### 3. Temporary Ban + +**Community Impact**: A serious violation of community standards, including sustained inappropriate behavior. 
+ +**Consequence**: A temporary ban from any sort of interaction or public communication with the community for a specified period of time. No public or private interaction with the people involved, including unsolicited interaction with those enforcing the Code of Conduct, is allowed during this period. Violating these terms may lead to a permanent ban. + +### 4. Permanent Ban + +**Community Impact**: Demonstrating a pattern of violation of community standards, including sustained inappropriate behavior, harassment of an individual, or aggression toward or disparagement of classes of individuals. + +**Consequence**: A permanent ban from any sort of public interaction within the community. + +## Attribution + +This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 2.0, +available at [https://www.contributor-covenant.org/version/2/0/code_of_conduct.html](https://www.contributor-covenant.org/version/2/0/code_of_conduct.html). + +Community Impact Guidelines were inspired by [Mozilla's code of conduct enforcement ladder](https://github.com/mozilla/diversity). + +[homepage]: https://www.contributor-covenant.org + +For answers to common questions about this code of conduct, see the FAQ at +[https://www.contributor-covenant.org/faq](https://www.contributor-covenant.org/faq). Translations are available at [https://www.contributor-covenant.org/translations](https://www.contributor-covenant.org/translations). diff --git a/vendor/bundle/ruby/3.2.0/gems/jwt-3.1.2/CONTRIBUTING.md b/vendor/bundle/ruby/3.2.0/gems/jwt-3.1.2/CONTRIBUTING.md new file mode 100644 index 000000000..e163e4af4 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/jwt-3.1.2/CONTRIBUTING.md @@ -0,0 +1,98 @@ +# Contributing to [ruby-jwt](https://github.com/jwt/ruby-jwt) + +## Forking the project + +Fork the project on GitHub and clone your own fork. Instructions on forking can be found in the [GitHub Docs](https://docs.github.com/en/get-started/quickstart/fork-a-repo). + +```bash +git clone git@github.com:you/ruby-jwt.git +cd ruby-jwt +git remote add upstream https://github.com/jwt/ruby-jwt +``` + +## Create a branch for your implementation + +Make sure you have the latest upstream main branch of the project. + +```bash +git fetch --all +git checkout main +git rebase upstream/main +git push origin main +git checkout -b fix-a-little-problem +``` + +## Running the tests and linter + +Before you start with your implementation, make sure you are able to get a successful test run with the current revision. + +The tests are written with rspec and [Appraisal](https://github.com/thoughtbot/appraisal) is used to ensure compatibility with 3rd party dependencies providing cryptographic features. + +[Rubocop](https://github.com/rubocop/rubocop) is used to enforce the Ruby style. + +To run the complete set of tests and the linter, run the following + +```bash +bundle install +bundle exec appraisal rake test +bundle exec rubocop +``` + +## Implement your feature + +Implement tests and your change. Don't be shy about adding a little something to the [README](README.md). +Add a short description of the change in either the `Features` or `Fixes` section in the [CHANGELOG](CHANGELOG.md) file. + +The format of the row (you will need to come back and update the row once you know the pull request id): + +```markdown +- Fix a little problem [#123](https://github.com/jwt/ruby-jwt/pull/123) - [@you](https://github.com/you). +``` + +## Push your branch and create a pull request + +Before pushing, make sure the tests pass and RuboCop is happy.
+ +```bash +bundle exec appraisal rake test +bundle exec rubocop +git push origin fix-a-little-problem +``` + +Make a new pull request on the [ruby-jwt project](https://github.com/jwt/ruby-jwt/pulls) with a description of what the change is about. + +## Update the CHANGELOG, again + +Update the [CHANGELOG](CHANGELOG.md) with the pull request id from the previous step. + +You can amend the previous commit with the updated changelog change and force push your branch. The PR will get automatically updated. + +```bash +git add CHANGELOG.md +git commit --amend --no-edit +git push origin fix-a-little-problem -f +``` + +## Keep an eye on your pull request + +A maintainer will review and probably merge your changes when time allows; be patient. + +## Keeping your branch up-to-date + +It's recommended that you keep your branch up-to-date by rebasing onto the upstream main. + +```bash +git fetch upstream +git checkout fix-a-little-problem +git rebase upstream/main +git push origin fix-a-little-problem -f +``` + +## Releasing a new version + +Versioning follows [Semantic Versioning](http://semver.org/), and the version is located in the [version.rb](lib/jwt/version.rb) file. +Also update the [CHANGELOG](CHANGELOG.md) to reflect the upcoming version release. + +```bash +rake release +``` diff --git a/vendor/bundle/ruby/3.2.0/gems/jwt-3.1.2/LICENSE b/vendor/bundle/ruby/3.2.0/gems/jwt-3.1.2/LICENSE new file mode 100644 index 000000000..927c375f4 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/jwt-3.1.2/LICENSE @@ -0,0 +1,7 @@ +Copyright (c) 2011 Jeff Lindsay + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/vendor/bundle/ruby/3.2.0/gems/jwt-3.1.2/README.md b/vendor/bundle/ruby/3.2.0/gems/jwt-3.1.2/README.md new file mode 100644 index 000000000..0e8122edc --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/jwt-3.1.2/README.md @@ -0,0 +1,782 @@ +# JWT + +[![Gem Version](https://badge.fury.io/rb/jwt.svg)](https://badge.fury.io/rb/jwt) +[![Build Status](https://github.com/jwt/ruby-jwt/actions/workflows/test.yml/badge.svg?branch=main)](https://github.com/jwt/ruby-jwt/actions) +[![Maintainability](https://qlty.sh/badges/6f61c5a6-6e23-41a7-8896-a3ce8b006655/maintainability.svg)](https://qlty.sh/gh/jwt/projects/ruby-jwt) +[![Code Coverage](https://qlty.sh/badges/6f61c5a6-6e23-41a7-8896-a3ce8b006655/test_coverage.svg)](https://qlty.sh/gh/jwt/projects/ruby-jwt) + +A ruby implementation of the [RFC 7519 OAuth JSON Web Token (JWT)](https://tools.ietf.org/html/rfc7519) standard.
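+
+A minimal usage sketch (HS256 signing and verification; the same calls are covered in more detail in the sections below):
+
+```ruby
+require 'jwt'
+
+payload = { data: 'test' }
+hmac_secret = 'my$ecretK3y'
+
+# Sign and encode the payload
+token = JWT.encode(payload, hmac_secret, 'HS256')
+
+# Verify the signature and decode; returns [payload, header]
+decoded_token = JWT.decode(token, hmac_secret, true, { algorithm: 'HS256' })
+# => [{"data"=>"test"}, {"alg"=>"HS256"}]
+```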
+ +If you have further questions related to development or usage, join us: [ruby-jwt google group](https://groups.google.com/forum/#!forum/ruby-jwt). + +See [CHANGELOG.md](CHANGELOG.md) for a complete set of changes and [upgrade guide](UPGRADING.md) for upgrading between major versions. + +## Sponsors + +| Logo | Message | +| -------------------- | ---------------------------------------- | +| ![auth0 logo](https://user-images.githubusercontent.com/83319/31722733-de95bbde-b3ea-11e7-96bf-4f4e8f915588.png) | If you want to quickly add secure token-based authentication to Ruby projects, feel free to check Auth0's Ruby SDK and free plan at [auth0.com/developers](https://auth0.com/developers?utm_source=GHsponsor&utm_medium=GHsponsor&utm_campaign=rubyjwt&utm_content=auth) | + +## Installing + +### Using Rubygems + +```bash +gem install jwt +``` + +### Using Bundler + +Add the following to your Gemfile: + +```ruby +gem 'jwt' +``` + +And run `bundle install` + +Finally, require the gem in your application + +```ruby +require 'jwt' +``` + +## Algorithms and Usage + +The jwt gem natively supports the NONE, HMAC, RSASSA, ECDSA and RSASSA-PSS algorithms via the openssl library. The gem can be extended with additional or alternative implementations of the algorithms via extensions. + +Additionally, the EdDSA algorithm is supported via the [jwt-eddsa gem](https://rubygems.org/gems/jwt-eddsa). + +For safe cryptographic signing, you need to specify the algorithm in the options hash whenever you call `JWT.decode` to ensure that an attacker [cannot bypass the algorithm verification step](https://auth0.com/blog/critical-vulnerabilities-in-json-web-token-libraries/). **It is strongly recommended that you hard code the algorithm, as you may leave yourself vulnerable by dynamically picking the algorithm.** + +See [JSON Web Algorithms (JWA) 3.1. "alg" (Algorithm) Header Parameter Values for JWS](https://tools.ietf.org/html/rfc7518#section-3.1) + +### **NONE** + +- none - unsigned token + +```ruby +payload = { data: 'test' } +token = JWT.encode(payload, nil, 'none') +# => "eyJhbGciOiJub25lIn0.eyJkYXRhIjoidGVzdCJ9." + +decoded_token = JWT.decode(token, nil, true, { algorithm: 'none' }) +# => [ +# {"data"=>"test"}, # payload +# {"alg"=>"none"} # header +# ] +``` + +### **HMAC** + +- HS256 - HMAC using SHA-256 hash algorithm +- HS384 - HMAC using SHA-384 hash algorithm +- HS512 - HMAC using SHA-512 hash algorithm + +```ruby +payload = { data: 'test' } +hmac_secret = 'my$ecretK3y' + +token = JWT.encode(payload, hmac_secret, 'HS256') +# => "eyJhbGciOiJIUzI1NiJ9.eyJkYXRhIjoidGVzdCJ9.pNIWIL34Jo13LViZAJACzK6Yf0qnvT_BuwOxiMCPE-Y" + +decoded_token = JWT.decode(token, hmac_secret, true, { algorithm: 'HS256' }) +# => [ +# {"data"=>"test"}, # payload +# {"alg"=>"HS256"} # header +# ] +``` + +### **RSA** + +- RS256 - RSA using SHA-256 hash algorithm +- RS384 - RSA using SHA-384 hash algorithm +- RS512 - RSA using SHA-512 hash algorithm + +```ruby +payload = { data: 'test' } +rsa_private = OpenSSL::PKey::RSA.generate(2048) +rsa_public = rsa_private.public_key + +token = JWT.encode(payload, rsa_private, 'RS256') +# => "eyJhbGciOiJSUzI1NiJ9.eyJkYXRhIjoidGVzdCJ9.CCkO35qFPijW8Gwhbt8a80PB9fc9FJ19hCMnXSgoDF6Mlvlt0A4G-ah..."
+ +decoded_token = JWT.decode(token, rsa_public, true, { algorithm: 'RS256' }) +# => [ +# {"data"=>"test"}, # payload +# {"alg"=>"RS256"} # header +# ] +``` + +### **ECDSA** + +- ES256 - ECDSA using P-256 and SHA-256 +- ES384 - ECDSA using P-384 and SHA-384 +- ES512 - ECDSA using P-521 and SHA-512 +- ES256K - ECDSA using P-256K and SHA-256 + +```ruby +payload = { data: 'test' } +ecdsa_key = OpenSSL::PKey::EC.generate('prime256v1') + +token = JWT.encode(payload, ecdsa_key, 'ES256') +# => "eyJhbGciOiJFUzI1NiJ9.eyJkYXRhIjoidGVzdCJ9.AlLW--kaF7EX1NMX9WJRuIW8NeRJbn2BLXHns7Q5TZr7Hy3lF6MOpMlp7GoxBFRLISQ6KrD0CJOrR8aogEsPeg" + +decoded_token = JWT.decode(token, ecdsa_key, true, { algorithm: 'ES256' }) +# => [ +# {"data"=>"test"}, # payload +# {"alg"=>"ES256"} # header +# ] +``` + +### **EdDSA** + +Since version 3.0, the EdDSA algorithm has been moved to the [jwt-eddsa gem](https://rubygems.org/gems/jwt-eddsa). + +### **RSASSA-PSS** + +- PS256 - RSASSA-PSS using SHA-256 hash algorithm +- PS384 - RSASSA-PSS using SHA-384 hash algorithm +- PS512 - RSASSA-PSS using SHA-512 hash algorithm + +```ruby +payload = { data: 'test' } +rsa_private = OpenSSL::PKey::RSA.generate(2048) +rsa_public = rsa_private.public_key + +token = JWT.encode(payload, rsa_private, 'PS256') +# => "eyJhbGciOiJQUzI1NiJ9.eyJkYXRhIjoidGVzdCJ9.BRWizdUjD5zAWw-EDBcrl3dDpQDAePz9Ol3XKC43SggU47G8OWwveA_..." + +decoded_token = JWT.decode(token, rsa_public, true, { algorithm: 'PS256' }) +# => [ +# {"data"=>"test"}, # payload +# {"alg"=>"PS256"} # header +# ] +``` + +### **Custom algorithms** + +When encoding or decoding a token, you can pass in a custom object through the `algorithm` option to handle signing or verification. This custom object must include or extend the `JWT::JWA::SigningAlgorithm` module and implement certain methods: + +- For decoding/verifying: The object must implement the methods `alg` and `verify`. +- For encoding/signing: The object must implement the methods `alg` and `sign`. + +For customization options, check the details of `JWT::JWA::SigningAlgorithm`. + +```ruby +module CustomHS512Algorithm + extend JWT::JWA::SigningAlgorithm + + def self.alg + 'HS512' + end + + def self.sign(data:, signing_key:) + OpenSSL::HMAC.digest(OpenSSL::Digest.new('sha512'), signing_key, data) + end + + def self.verify(data:, signature:, verification_key:) + ::OpenSSL.secure_compare(sign(data: data, signing_key: verification_key), signature) + end +end + +payload = { data: 'test' } +token = JWT.encode(payload, 'secret', CustomHS512Algorithm) +# => "eyJhbGciOiJIUzUxMiJ9.eyJkYXRhIjoidGVzdCJ9.aBNoejLEM2WMF3TxzRDKlehYdG2ATvFpGNauTI4GSD2VJseS_sC8covrVMlgslf0aJM4SKb3EIeORJBFPtZ33w" + +decoded_token = JWT.decode(token, 'secret', true, algorithm: CustomHS512Algorithm) +# => [ +# {"data"=>"test"}, # payload +# {"alg"=>"HS512"} # header +# ] +``` + +### Add custom header fields + +The ruby-jwt gem supports custom [header fields](https://tools.ietf.org/html/rfc7519#section-5). +To add custom header fields you need to pass the `header_fields` parameter: + +```ruby +payload = { data: 'test' } + +token = JWT.encode(payload, nil, 'none', { typ: 'JWT' }) +# => "eyJ0eXAiOiJKV1QiLCJhbGciOiJub25lIn0.eyJkYXRhIjoidGVzdCJ9." + +decoded_token = JWT.decode(token, nil, true, { algorithm: 'none' }) +# => [ +# {"data"=>"test"}, # payload +# {"typ"=>"JWT", "alg"=>"none"} # header +# ] +``` + +## `JWT::Token` and `JWT::EncodedToken` + +The `JWT::Token` and `JWT::EncodedToken` classes can be used to manage your JWTs.
+ +### Signing and encoding a token + +```ruby +payload = { exp: Time.now.to_i + 60, jti: '1234', sub: "my-subject" } +header = { kid: 'hmac' } + +token = JWT::Token.new(payload: payload, header: header) +token.sign!(algorithm: 'HS256', key: "secret") + +token.jwt +# => "eyJraWQiOiJobWFjIiwiYWxnIjoiSFMyNTYifQ.eyJleHAiOjE3NTAwMDU0NzksImp0aSI6IjEyMzQiLCJzdWIiOiJteS1zdWJqZWN0In0.NRLcK6fYr3IdNfmncJePMWLQ34M4n14EgqSYrQIjL9w" +``` + +### Verifying and decoding a token + +The `JWT::EncodedToken` class can be used as a token object that allows verification of signatures and claims. + +```ruby +encoded_token = JWT::EncodedToken.new(token.jwt) + +encoded_token.verify_signature!(algorithm: 'HS256', key: "secret") +encoded_token.verify_signature!(algorithm: 'HS256', key: "wrong_secret") # raises JWT::VerificationError +encoded_token.verify_claims!(:exp, :jti) +encoded_token.verify_claims!(sub: ["not-my-subject"]) # raises JWT::InvalidSubError +encoded_token.claim_errors(sub: ["not-my-subject"]).map(&:message) # => ["Invalid subject. Expected [\"not-my-subject\"], received my-subject"] +encoded_token.payload # => { 'exp'=>1234, 'jti'=>'1234', 'sub'=>'my-subject' } +encoded_token.header # {'kid'=>'hmac', 'alg'=>'HS256'} +``` + +The `JWT::EncodedToken#verify!` method can be used to perform signature and claim verification in one go. The `exp` claim is verified by default. + +```ruby +encoded_token = JWT::EncodedToken.new(token.jwt) +encoded_token.verify!(signature: {algorithm: 'HS256', key: "secret"}) +encoded_token.payload # => { 'exp'=>1234, 'jti'=>'1234', 'sub'=>'my-subject' } +encoded_token.header # {'kid'=>'hmac', 'alg'=>'HS256'} +``` + +A JWK can be used to sign and verify the token if it's possible to derive the signing algorithm from the key. + +```ruby +jwk_json = '{ + "kty": "oct", + "k": "c2VjcmV0", + "alg": "HS256", + "kid": "hmac" +}' + +jwk = JWT::JWK.import(JSON.parse(jwk_json)) + +token = JWT::Token.new(payload: payload, header: header) + +token.sign!(key: jwk, algorithm: 'HS256') + +encoded_token = JWT::EncodedToken.new(token.jwt) +encoded_token.verify!(signature: { algorithm: ["HS256", "HS512"], key: jwk}) +``` + +#### Using a keyfinder + +A keyfinder can be used to verify a signature. A keyfinder is an object responding to the `#call` method. The method expects to receive one argument, which is the token to be verified. + +An example of using the built-in JWK keyfinder: + +```ruby +# Create and sign a token +jwk = JWT::JWK.new(OpenSSL::PKey::RSA.generate(2048)) +token = JWT::Token.new(payload: { pay: 'load' }, header: { kid: jwk.kid }) +token.sign!(algorithm: 'RS256', key: jwk.signing_key) + +# Create keyfinder object, verify and decode token +key_finder = JWT::JWK::KeyFinder.new(jwks: JWT::JWK::Set.new(jwk)) +encoded_token = JWT::EncodedToken.new(token.jwt) +encoded_token.verify!(signature: { algorithm: 'RS256', key_finder: key_finder}) +encoded_token.payload # => { 'pay' => 'load' } +``` + +Using a custom keyfinder proc: + +```ruby +# Create and sign a token +key = OpenSSL::PKey::RSA.generate(2048) +token = JWT::Token.new(payload: { pay: 'load' }) +token.sign!(algorithm: 'RS256', key: key) + +# Verify and decode token +encoded_token = JWT::EncodedToken.new(token.jwt) +encoded_token.verify!(signature: { algorithm: 'RS256', key_finder: ->(_token){ key.public_key }}) +encoded_token.payload # => { 'pay' => 'load' } +``` + +### Detached payload + +The `::JWT::Token#detach_payload!` method can be used to detach the payload from the JWT.
+ +```ruby +token = JWT::Token.new(payload: { pay: 'load' }) +token.sign!(algorithm: 'HS256', key: "secret") +token.detach_payload! +token.jwt # => "eyJhbGciOiJIUzI1NiJ9..UEhDY1Qlj29ammxuVRA_-gBah4qTy5FngIWg0yEAlC0" +token.encoded_payload # => "eyJwYXkiOiJsb2FkIn0" +``` + +The `JWT::EncodedToken` class can be used to decode a token with a detached payload by providing the payload to the token instance separately. + +```ruby +encoded_token = JWT::EncodedToken.new(token.jwt) +encoded_token.encoded_payload = "eyJwYXkiOiJsb2FkIn0" +encoded_token.verify_signature!(algorithm: 'HS256', key: "secret") +encoded_token.payload # => {"pay"=>"load"} +``` + +## Claims + +JSON Web Token defines some reserved claim names and how they should be +used. JWT supports these reserved claim names: + +- 'exp' (Expiration Time) Claim +- 'nbf' (Not Before Time) Claim +- 'iss' (Issuer) Claim +- 'aud' (Audience) Claim +- 'jti' (JWT ID) Claim +- 'iat' (Issued At) Claim +- 'sub' (Subject) Claim + +### Expiration Time Claim + +From [Oauth JSON Web Token 4.1.4. "exp" (Expiration Time) Claim](https://tools.ietf.org/html/rfc7519#section-4.1.4): + +> The `exp` (expiration time) claim identifies the expiration time on or after which the JWT MUST NOT be accepted for processing. The processing of the `exp` claim requires that the current date/time MUST be before the expiration date/time listed in the `exp` claim. Implementers MAY provide for some small `leeway`, usually no more than a few minutes, to account for clock skew. Its value MUST be a number containing a **_NumericDate_** value. Use of this claim is OPTIONAL. + +```ruby +exp = Time.now.to_i + 4 * 3600 +exp_payload = { data: 'data', exp: exp } + +token = JWT.encode(exp_payload, hmac_secret, 'HS256') + +begin + decoded_token = JWT.decode(token, hmac_secret, true, { algorithm: 'HS256' }) +rescue JWT::ExpiredSignature + # Handle expired token, e.g. logout user or deny access +end +``` + +The Expiration Claim verification can be disabled. + +```ruby +# Decode token without raising JWT::ExpiredSignature error +JWT.decode(token, hmac_secret, true, { verify_expiration: false, algorithm: 'HS256' }) +``` + +Leeway and the exp claim. + +```ruby +exp = Time.now.to_i - 10 +leeway = 30 # seconds + +exp_payload = { data: 'data', exp: exp } + +# build expired token +token = JWT.encode(exp_payload, hmac_secret, 'HS256') + +begin + # add leeway to ensure the token is still accepted + decoded_token = JWT.decode(token, hmac_secret, true, { exp_leeway: leeway, algorithm: 'HS256' }) +rescue JWT::ExpiredSignature + # Handle expired token, e.g. logout user or deny access +end +``` + +### Not Before Time Claim + +From [Oauth JSON Web Token 4.1.5. "nbf" (Not Before) Claim](https://tools.ietf.org/html/rfc7519#section-4.1.5): + +> The `nbf` (not before) claim identifies the time before which the JWT MUST NOT be accepted for processing. The processing of the `nbf` claim requires that the current date/time MUST be after or equal to the not-before date/time listed in the `nbf` claim. Implementers MAY provide for some small `leeway`, usually no more than a few minutes, to account for clock skew. Its value MUST be a number containing a **_NumericDate_** value. Use of this claim is OPTIONAL. + +```ruby +nbf = Time.now.to_i - 3600 +nbf_payload = { data: 'data', nbf: nbf } + +token = JWT.encode(nbf_payload, hmac_secret, 'HS256') + +begin + decoded_token = JWT.decode(token, hmac_secret, true, { algorithm: 'HS256' }) +rescue JWT::ImmatureSignature + # Handle invalid token, e.g.
logout user or deny access +end +``` + +The Not Before Claim verification can be disabled. + +```ruby +# Decode token without raising JWT::ImmatureSignature error +JWT.decode(token, hmac_secret, true, { verify_not_before: false, algorithm: 'HS256' }) +``` + +Leeway and the nbf claim. + +```ruby +nbf = Time.now.to_i + 10 +leeway = 30 + +nbf_payload = { data: 'data', nbf: nbf } + +# build expired token +token = JWT.encode(nbf_payload, hmac_secret, 'HS256') + +begin + # add leeway to ensure the token is valid + decoded_token = JWT.decode(token, hmac_secret, true, { nbf_leeway: leeway, algorithm: 'HS256' }) +rescue JWT::ImmatureSignature + # Handle invalid token, e.g. logout user or deny access +end +``` + +### Issuer Claim + +From [Oauth JSON Web Token 4.1.1. "iss" (Issuer) Claim](https://tools.ietf.org/html/rfc7519#section-4.1.1): + +> The `iss` (issuer) claim identifies the principal that issued the JWT. The processing of this claim is generally application specific. The `iss` value is a case-sensitive string containing a **_StringOrURI_** value. Use of this claim is OPTIONAL. + +You can pass multiple allowed issuers as an Array, verification will pass if one of them matches the `iss` value in the payload. + +```ruby +iss = 'My Awesome Company Inc. or https://my.awesome.website/' +iss_payload = { data: 'data', iss: iss } + +token = JWT.encode(iss_payload, hmac_secret, 'HS256') + +begin + # Add iss to the validation to check if the token has been manipulated + decoded_token = JWT.decode(token, hmac_secret, true, { iss: iss, verify_iss: true, algorithm: 'HS256' }) +rescue JWT::InvalidIssuerError + # Handle invalid token, e.g. logout user or deny access +end +``` + +You can also pass a Regexp or Proc (with arity 1), verification will pass if the regexp matches or the proc returns truthy. +On supported ruby versions (>= 2.5) you can also delegate to methods, on older versions you will have +to convert them to proc (using `to_proc`) + +```ruby +JWT.decode(token, hmac_secret, true, + iss: %r'https://my.awesome.website/', + verify_iss: true, + algorithm: 'HS256') +``` + +```ruby +JWT.decode(token, hmac_secret, true, + iss: ->(issuer) { issuer.start_with?('My Awesome Company Inc') }, + verify_iss: true, + algorithm: 'HS256') +``` + +```ruby +JWT.decode(token, hmac_secret, true, + iss: method(:valid_issuer?), + verify_iss: true, + algorithm: 'HS256') + +# somewhere in the same class: +def valid_issuer?(issuer) + # custom validation +end +``` + +### Audience Claim + +From [Oauth JSON Web Token 4.1.3. "aud" (Audience) Claim](https://tools.ietf.org/html/rfc7519#section-4.1.3): + +> The `aud` (audience) claim identifies the recipients that the JWT is intended for. Each principal intended to process the JWT MUST identify itself with a value in the audience claim. If the principal processing the claim does not identify itself with a value in the `aud` claim when this claim is present, then the JWT MUST be rejected. In the general case, the `aud` value is an array of case-sensitive strings, each containing a **_StringOrURI_** value. In the special case when the JWT has one audience, the `aud` value MAY be a single case-sensitive string containing a **_StringOrURI_** value. The interpretation of audience values is generally application specific. Use of this claim is OPTIONAL. 
+ +```ruby +aud = ['Young', 'Old'] +aud_payload = { data: 'data', aud: aud } + +token = JWT.encode(aud_payload, hmac_secret, 'HS256') + +begin + # Add aud to the validation to check if the token has been manipulated + decoded_token = JWT.decode(token, hmac_secret, true, { aud: aud, verify_aud: true, algorithm: 'HS256' }) +rescue JWT::InvalidAudError + # Handle invalid token, e.g. logout user or deny access + puts 'Audience Error' +end +``` + +### JWT ID Claim + +From [Oauth JSON Web Token 4.1.7. "jti" (JWT ID) Claim](https://tools.ietf.org/html/rfc7519#section-4.1.7): + +> The `jti` (JWT ID) claim provides a unique identifier for the JWT. The identifier value MUST be assigned in a manner that ensures that there is a negligible probability that the same value will be accidentally assigned to a different data object; if the application uses multiple issuers, collisions MUST be prevented among values produced by different issuers as well. The `jti` claim can be used to prevent the JWT from being replayed. The `jti` value is a case-sensitive string. Use of this claim is OPTIONAL. + +```ruby +# Use the secret and iat to create a unique key per request to prevent replay attacks +jti_raw = [hmac_secret, iat].join(':').to_s +jti = Digest::MD5.hexdigest(jti_raw) +jti_payload = { data: 'data', iat: iat, jti: jti } + +token = JWT.encode(jti_payload, hmac_secret, 'HS256') + +begin + # If :verify_jti is true, validation will pass if a JTI is present + #decoded_token = JWT.decode(token, hmac_secret, true, { verify_jti: true, algorithm: 'HS256' }) + # Alternatively, pass a proc with your own code to check if the JTI has already been used + decoded_token = JWT.decode(token, hmac_secret, true, { verify_jti: proc { |jti| my_validation_method(jti) }, algorithm: 'HS256' }) + # or + decoded_token = JWT.decode(token, hmac_secret, true, { verify_jti: proc { |jti, payload| my_validation_method(jti, payload) }, algorithm: 'HS256' }) +rescue JWT::InvalidJtiError + # Handle invalid token, e.g. logout user or deny access + puts 'Error' +end +``` + +### Issued At Claim + +From [Oauth JSON Web Token 4.1.6. "iat" (Issued At) Claim](https://tools.ietf.org/html/rfc7519#section-4.1.6): + +> The `iat` (issued at) claim identifies the time at which the JWT was issued. This claim can be used to determine the age of the JWT. The `leeway` option is not taken into account when verifying this claim. The `iat_leeway` option was removed in version 2.2.0. Its value MUST be a number containing a **_NumericDate_** value. Use of this claim is OPTIONAL. + +```ruby +iat = Time.now.to_i +iat_payload = { data: 'data', iat: iat } + +token = JWT.encode(iat_payload, hmac_secret, 'HS256') + +begin + # Add iat to the validation to check if the token has been manipulated + decoded_token = JWT.decode(token, hmac_secret, true, { verify_iat: true, algorithm: 'HS256' }) +rescue JWT::InvalidIatError + # Handle invalid token, e.g. logout user or deny access +end +``` + +### Subject Claim + +From [Oauth JSON Web Token 4.1.2. "sub" (Subject) Claim](https://tools.ietf.org/html/rfc7519#section-4.1.2): + +> The `sub` (subject) claim identifies the principal that is the subject of the JWT. The Claims in a JWT are normally statements about the subject. The subject value MUST either be scoped to be locally unique in the context of the issuer or be globally unique. The processing of this claim is generally application specific. The sub value is a case-sensitive string containing a **_StringOrURI_** value. Use of this claim is OPTIONAL. 
+ +```ruby +sub = 'Subject' +sub_payload = { data: 'data', sub: sub } + +token = JWT.encode(sub_payload, hmac_secret, 'HS256') + +begin + # Add sub to the validation to check if the token has been manipulated + decoded_token = JWT.decode(token, hmac_secret, true, { sub: sub, verify_sub: true, algorithm: 'HS256' }) +rescue JWT::InvalidSubError + # Handle invalid token, e.g. logout user or deny access +end +``` + +### Standalone claim verification + +The JWT claim verification helpers can be used to verify that any Hash includes the expected keys and values. + +A few examples of verifying the claims of a payload: + +```ruby +JWT::Claims.verify_payload!({"exp" => Time.now.to_i + 10}, :numeric, :exp) +JWT::Claims.valid_payload?({"exp" => Time.now.to_i + 10}, :exp) +# => true +JWT::Claims.payload_errors({"exp" => Time.now.to_i - 10}, :exp) +# => [#] +JWT::Claims.verify_payload!({"exp" => Time.now.to_i - 10}, exp: { leeway: 11}) + +JWT::Claims.verify_payload!({"exp" => Time.now.to_i + 10, "sub" => "subject"}, :exp, sub: "subject") +``` + +### Finding a Key + +To dynamically find the key for verifying the JWT signature, pass a block to the decode call. The block receives headers and the original payload as parameters. It should return the key to verify the signature that was used to sign the JWT. + +```ruby +issuers = %w[My_Awesome_Company1 My_Awesome_Company2] +iss_payload = { data: 'data', iss: issuers.first } + +secrets = { issuers.first => hmac_secret, issuers.last => 'hmac_secret2' } + +token = JWT.encode(iss_payload, hmac_secret, 'HS256') + +begin + # Add iss to the validation to check if the token has been manipulated + decoded_token = JWT.decode(token, nil, true, { iss: issuers, verify_iss: true, algorithm: 'HS256' }) do |_headers, payload| + secrets[payload['iss']] + end +rescue JWT::InvalidIssuerError + # Handle invalid token, e.g. logout user or deny access +end +``` + +### Required Claims + +You can specify claims that must be present for decoding to be successful. JWT::MissingRequiredClaim will be raised if any are missing. + +```ruby +# Will raise a JWT::MissingRequiredClaim error if the 'exp' claim is absent +JWT.decode(token, hmac_secret, true, { required_claims: ['exp'], algorithm: 'HS256' }) +``` + +### X.509 certificates in x5c header + +A JWT signature can be verified using certificate(s) given in the `x5c` header. Before doing that, the trustworthiness of these certificate(s) must be established. This is done in accordance with RFC 5280 which (among other things) verifies the certificate(s) are issued by a trusted root certificate, the timestamps are valid, and none of the certificate(s) are revoked (i.e. being present in the root certificate's Certificate Revocation List). + +```ruby +root_certificates = [] # trusted `OpenSSL::X509::Certificate` objects +crl_uris = root_certificates.map(&:crl_uris) +crls = crl_uris.map do |uri| + # look up cached CRL by `uri` and return it if found, otherwise continue + crl = Net::HTTP.get(uri) + crl = OpenSSL::X509::CRL.new(crl) + # cache `crl` using `uri` as the key, expiry set to `crl.next_update` timestamp +end + +begin + JWT.decode(token, nil, true, { x5c: { root_certificates: root_certificates, crls: crls } }) +rescue JWT::DecodeError + # Handle error, e.g. x5c header certificate revoked or expired +end +``` + +## JSON Web Key (JWK) + +JWK is a JSON structure representing a cryptographic key. This gem currently supports RSA, EC, OKP and HMAC keys.
OKP support requires [RbNaCl](https://github.com/RubyCrypto/rbnacl) and currently only supports the Ed25519 curve. + +To encode a JWT using your JWK: + +```ruby +optional_parameters = { kid: 'my-kid', use: 'sig', alg: 'RS512' } +jwk = JWT::JWK.new(OpenSSL::PKey::RSA.new(2048), optional_parameters) + +# Encoding +payload = { data: 'data' } +token = JWT.encode(payload, jwk.signing_key, jwk[:alg], kid: jwk[:kid]) + +# JSON Web Key Set for advertising your signing keys +jwks_hash = JWT::JWK::Set.new(jwk).export +``` + +To decode a JWT using a trusted entity's JSON Web Key Set (JWKS): + +```ruby +jwks = JWT::JWK::Set.new(jwks_hash) +jwks.filter! {|key| key[:use] == 'sig' } # Signing keys only! +algorithms = jwks.map { |key| key[:alg] }.compact.uniq +JWT.decode(token, nil, true, algorithms: algorithms, jwks: jwks) +``` + +The `jwks` option can also be given as a lambda that evaluates every time a key identifier is resolved. +This can be used to implement caching of remotely fetched JWK Sets. + +Key identifiers can be specified using `kid`, `x5t` header parameters. +If the requested identifier is not found from the given set the loader will be called a second time with the `kid_not_found` option set to `true`. +The application can choose to implement some kind of JWK cache invalidation or other mechanism to handle such cases. + +Tokens without a specified key identifier (`kid` or `x5t`) are rejected by default. +This behaviour may be overwritten by setting the `allow_nil_kid` option for `decode` to `true`. + +```ruby +jwks_loader = ->(options) do + # The jwk loader would fetch the set of JWKs from a trusted source. + # To avoid malicious requests triggering cache invalidations there needs to be + # some kind of grace time or other logic for determining the validity of the invalidation. + # This example only allows cache invalidations every 5 minutes. + if options[:kid_not_found] && @cache_last_update < Time.now.to_i - 300 + logger.info("Invalidating JWK cache. #{options[:kid]} not found from previous cache") + @cached_keys = nil + end + @cached_keys ||= begin + @cache_last_update = Time.now.to_i + # Replace with your own JWKS fetching routine + jwks = JWT::JWK::Set.new(jwks_hash) + jwks.select! { |key| key[:use] == 'sig' } # Signing Keys only + jwks + end +end + +begin + JWT.decode(token, nil, true, { algorithms: ['RS512'], jwks: jwks_loader }) +rescue JWT::JWKError + # Handle problems with the provided JWKs +rescue JWT::DecodeError + # Handle other decode related issues e.g. no kid in header, no matching public key found etc. +end +``` + +### Importing and exporting JSON Web Keys + +The ::JWT::JWK class can be used to import both JSON Web Keys and OpenSSL keys +and export to either format with and without the private key included. + +To include the private key in the export pass the `include_private` parameter to the export method. + +```ruby +# Import a JWK Hash (showing an HMAC example) +jwk = JWT::JWK.new({ kty: 'oct', k: 'my-secret', kid: 'my-kid' }) + +# Import an OpenSSL key +# You can optionally add descriptive parameters to the JWK +desc_params = { kid: 'my-kid', use: 'sig' } +jwk = JWT::JWK.new(OpenSSL::PKey::RSA.new(2048), desc_params) + +# Export as JWK Hash (public key only by default) +jwk_hash = jwk.export +jwk_hash_with_private_key = jwk.export(include_private: true) + +# Export as OpenSSL key +public_key = jwk.verify_key +private_key = jwk.signing_key if jwk.private? 
+ +# You can also import and export entire JSON Web Key Sets +jwks_hash = { keys: [{ kty: 'oct', k: 'my-secret', kid: 'my-kid' }] } +jwks = JWT::JWK::Set.new(jwks_hash) +jwks_hash = jwks.export +``` + +### Key ID (kid) and JWKs + +The key id (kid) generation in the gem is a custom algorithm and not based on any standards. +To use a standardized JWK thumbprint (RFC 7638) as the kid for JWKs, a generator type can be specified in the global configuration +or can be given to the JWK instance on initialization. + +```ruby +JWT.configuration.jwk.kid_generator_type = :rfc7638_thumbprint +# OR +JWT.configuration.jwk.kid_generator = ::JWT::JWK::Thumbprint +# OR +jwk = JWT::JWK.new(OpenSSL::PKey::RSA.new(2048), nil, kid_generator: ::JWT::JWK::Thumbprint) + +jwk_hash = jwk.export + +thumbprint_as_the_kid = jwk_hash[:kid] +``` + +## Development and testing + +The tests are written with rspec. [Appraisal](https://github.com/thoughtbot/appraisal) is used to ensure compatibility with 3rd party dependencies providing cryptographic features. + +```bash +bundle install +bundle exec appraisal rake test +``` + +## Releasing + +To cut a new release, adjust the [version.rb](lib/jwt/version.rb) and [CHANGELOG](CHANGELOG.md) with the desired version numbers and dates and commit the changes. Tag the release with the version number using the following command: + +```bash +rake release:source_control_push +``` + +This will tag a new version and trigger a [GitHub action](.github/workflows/push_gem.yml) that will eventually push the gem to rubygems.org. + +## How to contribute + +See [CONTRIBUTING](CONTRIBUTING.md). + +## Contributors + +See [AUTHORS](AUTHORS). + +## License + +See [LICENSE](LICENSE). diff --git a/vendor/bundle/ruby/3.2.0/gems/jwt-3.1.2/UPGRADING.md b/vendor/bundle/ruby/3.2.0/gems/jwt-3.1.2/UPGRADING.md new file mode 100644 index 000000000..10c95d1ea --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/jwt-3.1.2/UPGRADING.md @@ -0,0 +1,47 @@ +# Upgrading ruby-jwt to >= 3.0.0 + +## Removal of the indirect [RbNaCl](https://github.com/RubyCrypto/rbnacl) dependency + +Historically, the set of supported algorithms was extended by including the `rbnacl` gem in the application's Gemfile. On load, ruby-jwt tried to load the gem and, if available, extend the algorithms to those provided by the `rbnacl/libsodium` libraries. This indirect dependency has caused some maintenance pain and confusion about which versions of the gem are supported. + +Some work to ease the way alternative algorithms can be implemented has been done. This enables the extraction of the algorithm provided by `rbnacl`. + +The extracted algorithms now live in the [jwt-eddsa](https://rubygems.org/gems/jwt-eddsa) gem. + +### Dropped support for HS512256 + +The algorithm HS512256 (HMAC-SHA-512 truncated to 256 bits) is not part of any JWA/JWT RFC and therefore will not be supported anymore. It was part of the HMAC algorithms provided by the indirect [RbNaCl](https://github.com/RubyCrypto/rbnacl) dependency. Currently, there are no direct substitutes for the algorithm. + +### `JWT::EncodedToken#payload` will raise before token is verified + +To avoid accidental use of unverified tokens, the `JWT::EncodedToken#payload` method will raise an error if accessed before the token signature has been verified. + +To access the payload before verification, use the method `JWT::EncodedToken#unverified_payload`. + +## Stricter requirements on Base64 encoded data + +Base64 decoding will no longer fall back on the looser RFC 2045.
The biggest difference is that the looser version was ignoring whitespace and newlines, whereas the stricter version raises errors in such cases. + +If you, for example, read tokens from files, there could be problems with trailing newlines. Make sure you trim your input before passing it to the decoding mechanisms. + +## Claim verification revamp + +Claim verification has been [split into separate classes](https://github.com/jwt/ruby-jwt/pull/605) and has [a new API](https://github.com/jwt/ruby-jwt/pull/626), leading to the following deprecations: + +- The `::JWT::ClaimsValidator` class will be removed in favor of the functionality provided by `::JWT::Claims`. +- The `::JWT::Claims::verify!` method will be removed in favor of `::JWT::Claims::verify_payload!`. +- The `::JWT::JWA.create` method will be removed. +- The `::JWT::Verify` class will be removed in favor of the functionality provided by `::JWT::Claims`. +- Calling `::JWT::Claims::Numeric.new` with a payload will be removed in favor of `::JWT::Claims::verify_payload!(payload, :numeric)`. +- Calling `::JWT::Claims::Numeric.verify!` with a payload will be removed in favor of `::JWT::Claims::verify_payload!(payload, :numeric)`. + +## Algorithm restructuring + +The internal algorithms were [restructured](https://github.com/jwt/ruby-jwt/pull/607) to support extensions from separate libraries. The changes led to a few deprecations and new requirements: + +- The `sign` and `verify` static methods on all the algorithms (`::JWT::JWA`) will be removed. +- Custom algorithms are expected to include the `JWT::JWA::SigningAlgorithm` module. + +## Base64 the `k` value for HMAC JWKs + +The gem was missing the Base64 encoding and decoding when representing and parsing an HMAC key as a JWK. This issue is now addressed. The added encoding will break compatibility with JWKs produced by older versions of the gem. diff --git a/vendor/bundle/ruby/3.2.0/gems/jwt-3.1.2/lib/jwt.rb b/vendor/bundle/ruby/3.2.0/gems/jwt-3.1.2/lib/jwt.rb new file mode 100644 index 000000000..86ac2e6ac --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/jwt-3.1.2/lib/jwt.rb @@ -0,0 +1,48 @@ +# frozen_string_literal: true + +require 'jwt/version' +require 'jwt/base64' +require 'jwt/json' +require 'jwt/decode' +require 'jwt/configuration' +require 'jwt/encode' +require 'jwt/error' +require 'jwt/jwk' +require 'jwt/claims' +require 'jwt/encoded_token' +require 'jwt/token' + +# JSON Web Token implementation +# +# Should be up to date with the latest spec: +# https://tools.ietf.org/html/rfc7519 +module JWT + extend ::JWT::Configuration + + module_function + + # Encodes a payload into a JWT. + # + # @param payload [Hash] the payload to encode. + # @param key [String] the key used to sign the JWT. + # @param algorithm [String] the algorithm used to sign the JWT. + # @param header_fields [Hash] additional headers to include in the JWT. + # @return [String] the encoded JWT. + def encode(payload, key, algorithm = 'HS256', header_fields = {}) + Encode.new(payload: payload, + key: key, + algorithm: algorithm, + headers: header_fields).segments + end + + # Decodes a JWT to extract the payload and header + # + # @param jwt [String] the JWT to decode. + # @param key [String] the key used to verify the JWT. + # @param verify [Boolean] whether to verify the JWT signature. + # @param options [Hash] additional options for decoding. + # @return [Array] the decoded payload and headers.
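+  # @example Decoding and verifying an HS256 signed token (illustrative usage, mirroring the README examples)
+  #   payload, header = JWT.decode(token, hmac_secret, true, { algorithm: 'HS256' })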
+ def decode(jwt, key = nil, verify = true, options = {}, &keyfinder) # rubocop:disable Style/OptionalBooleanParameter + Decode.new(jwt, key, verify, configuration.decode.to_h.merge(options), &keyfinder).decode_segments + end +end diff --git a/vendor/bundle/ruby/3.2.0/gems/jwt-3.1.2/lib/jwt/base64.rb b/vendor/bundle/ruby/3.2.0/gems/jwt-3.1.2/lib/jwt/base64.rb new file mode 100644 index 000000000..fdf1bf95a --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/jwt-3.1.2/lib/jwt/base64.rb @@ -0,0 +1,27 @@ +# frozen_string_literal: true + +require 'base64' + +module JWT + # Base64 encoding and decoding + # @api private + class Base64 + class << self + # Encode a string with URL-safe Base64 complying with RFC 4648 (not padded). + # @api private + def url_encode(str) + ::Base64.urlsafe_encode64(str, padding: false) + end + + # Decode a string with URL-safe Base64 complying with RFC 4648. + # @api private + def url_decode(str) + ::Base64.urlsafe_decode64(str) + rescue ArgumentError => e + raise unless e.message == 'invalid base64' + + raise Base64DecodeError, 'Invalid base64 encoding' + end + end + end +end diff --git a/vendor/bundle/ruby/3.2.0/gems/jwt-3.1.2/lib/jwt/claims.rb b/vendor/bundle/ruby/3.2.0/gems/jwt-3.1.2/lib/jwt/claims.rb new file mode 100644 index 000000000..45ed547b1 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/jwt-3.1.2/lib/jwt/claims.rb @@ -0,0 +1,67 @@ +# frozen_string_literal: true + +require_relative 'claims/audience' +require_relative 'claims/crit' +require_relative 'claims/decode_verifier' +require_relative 'claims/expiration' +require_relative 'claims/issued_at' +require_relative 'claims/issuer' +require_relative 'claims/jwt_id' +require_relative 'claims/not_before' +require_relative 'claims/numeric' +require_relative 'claims/required' +require_relative 'claims/subject' +require_relative 'claims/verifier' + +module JWT + # JWT Claim verifications + # https://datatracker.ietf.org/doc/html/rfc7519#section-4 + # + # Verification is supported for the following claims: + # exp + # nbf + # iss + # iat + # jti + # aud + # sub + # required + # numeric + module Claims + # Represents a claim verification error + Error = Struct.new(:message, keyword_init: true) + + class << self + # Checks if the claims in the JWT payload are valid. + # @example + # + # ::JWT::Claims.verify_payload!({"exp" => Time.now.to_i + 10}, :exp) + # ::JWT::Claims.verify_payload!({"exp" => Time.now.to_i - 10}, exp: { leeway: 11}) + # + # @param payload [Hash] the JWT payload. + # @param options [Array] the options for verifying the claims. + # @return [void] + # @raise [JWT::DecodeError] if any claim is invalid. + def verify_payload!(payload, *options) + Verifier.verify!(VerificationContext.new(payload: payload), *options) + end + + # Checks if the claims in the JWT payload are valid. + # + # @param payload [Hash] the JWT payload. + # @param options [Array] the options for verifying the claims. + # @return [Boolean] true if the claims are valid, false otherwise + def valid_payload?(payload, *options) + payload_errors(payload, *options).empty? + end + + # Returns the errors in the claims of the JWT token. + # + # @param options [Array] the options for verifying the claims. 
+ # @return [Array] the errors in the claims of the JWT + def payload_errors(payload, *options) + Verifier.errors(VerificationContext.new(payload: payload), *options) + end + end + end +end diff --git a/vendor/bundle/ruby/3.2.0/gems/jwt-3.1.2/lib/jwt/claims/audience.rb b/vendor/bundle/ruby/3.2.0/gems/jwt-3.1.2/lib/jwt/claims/audience.rb new file mode 100644 index 000000000..f828fc592 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/jwt-3.1.2/lib/jwt/claims/audience.rb @@ -0,0 +1,30 @@ +# frozen_string_literal: true + +module JWT + module Claims + # The Audience class is responsible for validating the audience claim ('aud') in a JWT token. + class Audience + # Initializes a new Audience instance. + # + # @param expected_audience [String, Array] the expected audience(s) for the JWT token. + def initialize(expected_audience:) + @expected_audience = expected_audience + end + + # Verifies the audience claim ('aud') in the JWT token. + # + # @param context [Object] the context containing the JWT payload. + # @param _args [Hash] additional arguments (not used). + # @raise [JWT::InvalidAudError] if the audience claim is invalid. + # @return [nil] + def verify!(context:, **_args) + aud = context.payload['aud'] + raise JWT::InvalidAudError, "Invalid audience. Expected #{expected_audience}, received #{aud || ''}" if ([*aud] & [*expected_audience]).empty? + end + + private + + attr_reader :expected_audience + end + end +end diff --git a/vendor/bundle/ruby/3.2.0/gems/jwt-3.1.2/lib/jwt/claims/crit.rb b/vendor/bundle/ruby/3.2.0/gems/jwt-3.1.2/lib/jwt/claims/crit.rb new file mode 100644 index 000000000..ac339eb0b --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/jwt-3.1.2/lib/jwt/claims/crit.rb @@ -0,0 +1,35 @@ +# frozen_string_literal: true + +module JWT + module Claims + # Responsible of validation the crit header + class Crit + # Initializes a new Crit instance. + # + # @param expected_crits [String] the expected crit header values for the JWT token. + def initialize(expected_crits:) + @expected_crits = Array(expected_crits) + end + + # Verifies the critical claim ('crit') in the JWT token header. + # + # @param context [Object] the context containing the JWT payload and header. + # @param _args [Hash] additional arguments (not used). + # @raise [JWT::InvalidCritError] if the crit claim is invalid. + # @return [nil] + def verify!(context:, **_args) + raise(JWT::InvalidCritError, 'Crit header missing') unless context.header['crit'] + raise(JWT::InvalidCritError, 'Crit header should be an array') unless context.header['crit'].is_a?(Array) + + missing = (expected_crits - context.header['crit']) + raise(JWT::InvalidCritError, "Crit header missing expected values: #{missing.join(', ')}") if missing.any? 
+ + nil + end + + private + + attr_reader :expected_crits + end + end +end diff --git a/vendor/bundle/ruby/3.2.0/gems/jwt-3.1.2/lib/jwt/claims/decode_verifier.rb b/vendor/bundle/ruby/3.2.0/gems/jwt-3.1.2/lib/jwt/claims/decode_verifier.rb new file mode 100644 index 000000000..411bb97cd --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/jwt-3.1.2/lib/jwt/claims/decode_verifier.rb @@ -0,0 +1,40 @@ +# frozen_string_literal: true + +module JWT + module Claims + # Context class to contain the data passed to individual claim validators + # + # @api private + VerificationContext = Struct.new(:payload, keyword_init: true) + + # Verifiers to support the ::JWT.decode method + # + # @api private + module DecodeVerifier + VERIFIERS = { + verify_expiration: ->(options) { Claims::Expiration.new(leeway: options[:exp_leeway] || options[:leeway]) }, + verify_not_before: ->(options) { Claims::NotBefore.new(leeway: options[:nbf_leeway] || options[:leeway]) }, + verify_iss: ->(options) { options[:iss] && Claims::Issuer.new(issuers: options[:iss]) }, + verify_iat: ->(*) { Claims::IssuedAt.new }, + verify_jti: ->(options) { Claims::JwtId.new(validator: options[:verify_jti]) }, + verify_aud: ->(options) { options[:aud] && Claims::Audience.new(expected_audience: options[:aud]) }, + verify_sub: ->(options) { options[:sub] && Claims::Subject.new(expected_subject: options[:sub]) }, + required_claims: ->(options) { Claims::Required.new(required_claims: options[:required_claims]) } + }.freeze + + private_constant(:VERIFIERS) + + class << self + # @api private + def verify!(payload, options) + VERIFIERS.each do |key, verifier_builder| + next unless options[key] || options[key.to_s] + + verifier_builder&.call(options)&.verify!(context: VerificationContext.new(payload: payload)) + end + nil + end + end + end + end +end diff --git a/vendor/bundle/ruby/3.2.0/gems/jwt-3.1.2/lib/jwt/claims/expiration.rb b/vendor/bundle/ruby/3.2.0/gems/jwt-3.1.2/lib/jwt/claims/expiration.rb new file mode 100644 index 000000000..0412dc4c7 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/jwt-3.1.2/lib/jwt/claims/expiration.rb @@ -0,0 +1,32 @@ +# frozen_string_literal: true + +module JWT + module Claims + # The Expiration class is responsible for validating the expiration claim ('exp') in a JWT token. + class Expiration + # Initializes a new Expiration instance. + # + # @param leeway [Integer] the amount of leeway (in seconds) to allow when validating the expiration time. Default: 0. + def initialize(leeway:) + @leeway = leeway || 0 + end + + # Verifies the expiration claim ('exp') in the JWT token. + # + # @param context [Object] the context containing the JWT payload. + # @param _args [Hash] additional arguments (not used). + # @raise [JWT::ExpiredSignature] if the token has expired. + # @return [nil] + def verify!(context:, **_args) + return unless context.payload.is_a?(Hash) + return unless context.payload.key?('exp') + + raise JWT::ExpiredSignature, 'Signature has expired' if context.payload['exp'].to_i <= (Time.now.to_i - leeway) + end + + private + + attr_reader :leeway + end + end +end diff --git a/vendor/bundle/ruby/3.2.0/gems/jwt-3.1.2/lib/jwt/claims/issued_at.rb b/vendor/bundle/ruby/3.2.0/gems/jwt-3.1.2/lib/jwt/claims/issued_at.rb new file mode 100644 index 000000000..0eb08446b --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/jwt-3.1.2/lib/jwt/claims/issued_at.rb @@ -0,0 +1,22 @@ +# frozen_string_literal: true + +module JWT + module Claims + # The IssuedAt class is responsible for validating the issued at claim ('iat') in a JWT token. 
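+    # A usage sketch via the claims API (the payload below is a placeholder):
+    #
+    #   JWT::Claims.verify_payload!({ 'iat' => Time.now.to_i }, :iat)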
+ class IssuedAt + # Verifies the issued at claim ('iat') in the JWT token. + # + # @param context [Object] the context containing the JWT payload. + # @param _args [Hash] additional arguments (not used). + # @raise [JWT::InvalidIatError] if the issued at claim is invalid. + # @return [nil] + def verify!(context:, **_args) + return unless context.payload.is_a?(Hash) + return unless context.payload.key?('iat') + + iat = context.payload['iat'] + raise(JWT::InvalidIatError, 'Invalid iat') if !iat.is_a?(::Numeric) || iat.to_f > Time.now.to_f + end + end + end +end diff --git a/vendor/bundle/ruby/3.2.0/gems/jwt-3.1.2/lib/jwt/claims/issuer.rb b/vendor/bundle/ruby/3.2.0/gems/jwt-3.1.2/lib/jwt/claims/issuer.rb new file mode 100644 index 000000000..ca8783752 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/jwt-3.1.2/lib/jwt/claims/issuer.rb @@ -0,0 +1,34 @@ +# frozen_string_literal: true + +module JWT + module Claims + # The Issuer class is responsible for validating the issuer claim ('iss') in a JWT token. + class Issuer + # Initializes a new Issuer instance. + # + # @param issuers [String, Symbol, Array] the expected issuer(s) for the JWT token. + def initialize(issuers:) + @issuers = Array(issuers).map { |item| item.is_a?(Symbol) ? item.to_s : item } + end + + # Verifies the issuer claim ('iss') in the JWT token. + # + # @param context [Object] the context containing the JWT payload. + # @param _args [Hash] additional arguments (not used). + # @raise [JWT::InvalidIssuerError] if the issuer claim is invalid. + # @return [nil] + def verify!(context:, **_args) + case (iss = context.payload['iss']) + when *issuers + nil + else + raise JWT::InvalidIssuerError, "Invalid issuer. Expected #{issuers}, received #{iss || ''}" + end + end + + private + + attr_reader :issuers + end + end +end diff --git a/vendor/bundle/ruby/3.2.0/gems/jwt-3.1.2/lib/jwt/claims/jwt_id.rb b/vendor/bundle/ruby/3.2.0/gems/jwt-3.1.2/lib/jwt/claims/jwt_id.rb new file mode 100644 index 000000000..8d17fdccb --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/jwt-3.1.2/lib/jwt/claims/jwt_id.rb @@ -0,0 +1,35 @@ +# frozen_string_literal: true + +module JWT + module Claims + # The JwtId class is responsible for validating the JWT ID claim ('jti') in a JWT token. + class JwtId + # Initializes a new JwtId instance. + # + # @param validator [#call] an object responding to `call` to validate the JWT ID. + def initialize(validator:) + @validator = validator + end + + # Verifies the JWT ID claim ('jti') in the JWT token. + # + # @param context [Object] the context containing the JWT payload. + # @param _args [Hash] additional arguments (not used). + # @raise [JWT::InvalidJtiError] if the JWT ID claim is invalid or missing. + # @return [nil] + def verify!(context:, **_args) + jti = context.payload['jti'] + if validator.respond_to?(:call) + verified = validator.arity == 2 ? validator.call(jti, context.payload) : validator.call(jti) + raise(JWT::InvalidJtiError, 'Invalid jti') unless verified + elsif jti.to_s.strip.empty? 
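+          # Without a callable validator, a blank or missing 'jti' claim is rejected.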
+ raise(JWT::InvalidJtiError, 'Missing jti') + end + end + + private + + attr_reader :validator + end + end +end diff --git a/vendor/bundle/ruby/3.2.0/gems/jwt-3.1.2/lib/jwt/claims/not_before.rb b/vendor/bundle/ruby/3.2.0/gems/jwt-3.1.2/lib/jwt/claims/not_before.rb new file mode 100644 index 000000000..879ad0319 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/jwt-3.1.2/lib/jwt/claims/not_before.rb @@ -0,0 +1,32 @@ +# frozen_string_literal: true + +module JWT + module Claims + # The NotBefore class is responsible for validating the 'nbf' (Not Before) claim in a JWT token. + class NotBefore + # Initializes a new NotBefore instance. + # + # @param leeway [Integer] the amount of leeway (in seconds) to allow when validating the 'nbf' claim. Defaults to 0. + def initialize(leeway:) + @leeway = leeway || 0 + end + + # Verifies the 'nbf' (Not Before) claim in the JWT token. + # + # @param context [Object] the context containing the JWT payload. + # @param _args [Hash] additional arguments (not used). + # @raise [JWT::ImmatureSignature] if the 'nbf' claim has not been reached. + # @return [nil] + def verify!(context:, **_args) + return unless context.payload.is_a?(Hash) + return unless context.payload.key?('nbf') + + raise JWT::ImmatureSignature, 'Signature nbf has not been reached' if context.payload['nbf'].to_i > (Time.now.to_i + leeway) + end + + private + + attr_reader :leeway + end + end +end diff --git a/vendor/bundle/ruby/3.2.0/gems/jwt-3.1.2/lib/jwt/claims/numeric.rb b/vendor/bundle/ruby/3.2.0/gems/jwt-3.1.2/lib/jwt/claims/numeric.rb new file mode 100644 index 000000000..7589dc6bf --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/jwt-3.1.2/lib/jwt/claims/numeric.rb @@ -0,0 +1,45 @@ +# frozen_string_literal: true + +module JWT + module Claims + # The Numeric class is responsible for validating numeric claims in a JWT token. + # The numeric claims are: exp, iat and nbf + class Numeric + # List of numeric claims that can be validated. + NUMERIC_CLAIMS = %i[ + exp + iat + nbf + ].freeze + + private_constant(:NUMERIC_CLAIMS) + + # Verifies the numeric claims in the JWT context. + # + # @param context [Object] the context containing the JWT payload. + # @raise [JWT::InvalidClaimError] if any numeric claim is invalid. + # @return [nil] + def verify!(context:) + validate_numeric_claims(context.payload) + end + + private + + def validate_numeric_claims(payload) + NUMERIC_CLAIMS.each do |claim| + validate_is_numeric(payload, claim) + end + end + + def validate_is_numeric(payload, claim) + return unless payload.is_a?(Hash) + return unless payload.key?(claim) || + payload.key?(claim.to_s) + + return if payload[claim].is_a?(::Numeric) || payload[claim.to_s].is_a?(::Numeric) + + raise InvalidPayload, "#{claim} claim must be a Numeric value but it is a #{(payload[claim] || payload[claim.to_s]).class}" + end + end + end +end diff --git a/vendor/bundle/ruby/3.2.0/gems/jwt-3.1.2/lib/jwt/claims/required.rb b/vendor/bundle/ruby/3.2.0/gems/jwt-3.1.2/lib/jwt/claims/required.rb new file mode 100644 index 000000000..e0f0e1d77 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/jwt-3.1.2/lib/jwt/claims/required.rb @@ -0,0 +1,33 @@ +# frozen_string_literal: true + +module JWT + module Claims + # The Required class is responsible for validating that all required claims are present in a JWT token. + class Required + # Initializes a new Required instance. + # + # @param required_claims [Array] the list of required claims. 
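+      # @example Sketch via the claims API (payload and claim name are placeholders)
+      #   JWT::Claims.verify_payload!({ 'sub' => 'user-1' }, required: ['sub'])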
+ def initialize(required_claims:) + @required_claims = required_claims + end + + # Verifies that all required claims are present in the JWT payload. + # + # @param context [Object] the context containing the JWT payload. + # @param _args [Hash] additional arguments (not used). + # @raise [JWT::MissingRequiredClaim] if any required claim is missing. + # @return [nil] + def verify!(context:, **_args) + required_claims.each do |required_claim| + next if context.payload.is_a?(Hash) && context.payload.key?(required_claim) + + raise JWT::MissingRequiredClaim, "Missing required claim #{required_claim}" + end + end + + private + + attr_reader :required_claims + end + end +end diff --git a/vendor/bundle/ruby/3.2.0/gems/jwt-3.1.2/lib/jwt/claims/subject.rb b/vendor/bundle/ruby/3.2.0/gems/jwt-3.1.2/lib/jwt/claims/subject.rb new file mode 100644 index 000000000..18b26eebf --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/jwt-3.1.2/lib/jwt/claims/subject.rb @@ -0,0 +1,30 @@ +# frozen_string_literal: true + +module JWT + module Claims + # The Subject class is responsible for validating the subject claim ('sub') in a JWT token. + class Subject + # Initializes a new Subject instance. + # + # @param expected_subject [String] the expected subject for the JWT token. + def initialize(expected_subject:) + @expected_subject = expected_subject.to_s + end + + # Verifies the subject claim ('sub') in the JWT token. + # + # @param context [Object] the context containing the JWT payload. + # @param _args [Hash] additional arguments (not used). + # @raise [JWT::InvalidSubError] if the subject claim is invalid. + # @return [nil] + def verify!(context:, **_args) + sub = context.payload['sub'] + raise(JWT::InvalidSubError, "Invalid subject. Expected #{expected_subject}, received #{sub || ''}") unless sub.to_s == expected_subject + end + + private + + attr_reader :expected_subject + end + end +end diff --git a/vendor/bundle/ruby/3.2.0/gems/jwt-3.1.2/lib/jwt/claims/verifier.rb b/vendor/bundle/ruby/3.2.0/gems/jwt-3.1.2/lib/jwt/claims/verifier.rb new file mode 100644 index 000000000..81ce8a23d --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/jwt-3.1.2/lib/jwt/claims/verifier.rb @@ -0,0 +1,61 @@ +# frozen_string_literal: true + +module JWT + module Claims + # @api private + module Verifier + VERIFIERS = { + exp: ->(options) { Claims::Expiration.new(leeway: options.dig(:exp, :leeway)) }, + nbf: ->(options) { Claims::NotBefore.new(leeway: options.dig(:nbf, :leeway)) }, + iss: ->(options) { Claims::Issuer.new(issuers: options[:iss]) }, + iat: ->(*) { Claims::IssuedAt.new }, + jti: ->(options) { Claims::JwtId.new(validator: options[:jti]) }, + aud: ->(options) { Claims::Audience.new(expected_audience: options[:aud]) }, + sub: ->(options) { Claims::Subject.new(expected_subject: options[:sub]) }, + crit: ->(options) { Claims::Crit.new(expected_crits: options[:crit]) }, + required: ->(options) { Claims::Required.new(required_claims: options[:required]) }, + numeric: ->(*) { Claims::Numeric.new } + }.freeze + + private_constant(:VERIFIERS) + + class << self + # @api private + def verify!(context, *options) + iterate_verifiers(*options) do |verifier, verifier_options| + verify_one!(context, verifier, verifier_options) + end + nil + end + + # @api private + def errors(context, *options) + errors = [] + iterate_verifiers(*options) do |verifier, verifier_options| + verify_one!(context, verifier, verifier_options) + rescue ::JWT::DecodeError => e + errors << Error.new(message: e.message) + end + errors + end + + private + + def 
iterate_verifiers(*options) + options.each do |element| + if element.is_a?(Hash) + element.each_key { |key| yield(key, element) } + else + yield(element, {}) + end + end + end + + def verify_one!(context, verifier, options) + verifier_builder = VERIFIERS.fetch(verifier) { raise ArgumentError, "#{verifier} not a valid claim verifier" } + verifier_builder.call(options || {}).verify!(context: context) + end + end + end + end +end diff --git a/vendor/bundle/ruby/3.2.0/gems/jwt-3.1.2/lib/jwt/configuration.rb b/vendor/bundle/ruby/3.2.0/gems/jwt-3.1.2/lib/jwt/configuration.rb new file mode 100644 index 000000000..cdd37a4ab --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/jwt-3.1.2/lib/jwt/configuration.rb @@ -0,0 +1,23 @@ +# frozen_string_literal: true + +require_relative 'configuration/container' + +module JWT + # The Configuration module provides methods to configure JWT settings. + module Configuration + # Configures the JWT settings. + # + # @yield [config] Gives the current configuration to the block. + # @yieldparam config [JWT::Configuration::Container] the configuration container. + def configure + yield(configuration) + end + + # Returns the JWT configuration container. + # + # @return [JWT::Configuration::Container] the configuration container. + def configuration + @configuration ||= ::JWT::Configuration::Container.new + end + end +end diff --git a/vendor/bundle/ruby/3.2.0/gems/jwt-3.1.2/lib/jwt/configuration/container.rb b/vendor/bundle/ruby/3.2.0/gems/jwt-3.1.2/lib/jwt/configuration/container.rb new file mode 100644 index 000000000..9351d965e --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/jwt-3.1.2/lib/jwt/configuration/container.rb @@ -0,0 +1,51 @@ +# frozen_string_literal: true + +require_relative 'decode_configuration' +require_relative 'jwk_configuration' + +module JWT + module Configuration + # The Container class holds the configuration settings for JWT. + class Container + # @!attribute [rw] decode + # @return [DecodeConfiguration] the decode configuration. + # @!attribute [rw] jwk + # @return [JwkConfiguration] the JWK configuration. + # @!attribute [rw] strict_base64_decoding + # @return [Boolean] whether strict Base64 decoding is enabled. + attr_accessor :decode, :jwk, :strict_base64_decoding + + # @!attribute [r] deprecation_warnings + # @return [Symbol] the deprecation warnings setting. + attr_reader :deprecation_warnings + + # Initializes a new Container instance and resets the configuration. + def initialize + reset! + end + + # Resets the configuration to default values. + # + # @return [void] + def reset! + @decode = DecodeConfiguration.new + @jwk = JwkConfiguration.new + + self.deprecation_warnings = :once + end + + DEPRECATION_WARNINGS_VALUES = %i[once warn silent].freeze + private_constant(:DEPRECATION_WARNINGS_VALUES) + # Sets the deprecation warnings setting. + # + # @param value [Symbol] the deprecation warnings setting. Must be one of `:once`, `:warn`, or `:silent`. + # @raise [ArgumentError] if the value is not one of the supported values. + # @return [void] + def deprecation_warnings=(value) + raise ArgumentError, "Invalid deprecation_warnings value #{value}. 
Supported values: #{DEPRECATION_WARNINGS_VALUES}" unless DEPRECATION_WARNINGS_VALUES.include?(value) + + @deprecation_warnings = value + end + end + end +end diff --git a/vendor/bundle/ruby/3.2.0/gems/jwt-3.1.2/lib/jwt/configuration/decode_configuration.rb b/vendor/bundle/ruby/3.2.0/gems/jwt-3.1.2/lib/jwt/configuration/decode_configuration.rb new file mode 100644 index 000000000..4acfd3ebb --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/jwt-3.1.2/lib/jwt/configuration/decode_configuration.rb @@ -0,0 +1,70 @@ +# frozen_string_literal: true + +module JWT + module Configuration + # The DecodeConfiguration class holds the configuration settings for decoding JWT tokens. + class DecodeConfiguration + # @!attribute [rw] verify_expiration + # @return [Boolean] whether to verify the expiration claim. + # @!attribute [rw] verify_not_before + # @return [Boolean] whether to verify the not before claim. + # @!attribute [rw] verify_iss + # @return [Boolean] whether to verify the issuer claim. + # @!attribute [rw] verify_iat + # @return [Boolean] whether to verify the issued at claim. + # @!attribute [rw] verify_jti + # @return [Boolean] whether to verify the JWT ID claim. + # @!attribute [rw] verify_aud + # @return [Boolean] whether to verify the audience claim. + # @!attribute [rw] verify_sub + # @return [Boolean] whether to verify the subject claim. + # @!attribute [rw] leeway + # @return [Integer] the leeway in seconds for time-based claims. + # @!attribute [rw] algorithms + # @return [Array] the list of acceptable algorithms. + # @!attribute [rw] required_claims + # @return [Array] the list of required claims. + + attr_accessor :verify_expiration, + :verify_not_before, + :verify_iss, + :verify_iat, + :verify_jti, + :verify_aud, + :verify_sub, + :leeway, + :algorithms, + :required_claims + + # Initializes a new DecodeConfiguration instance with default settings. + def initialize + @verify_expiration = true + @verify_not_before = true + @verify_iss = false + @verify_iat = false + @verify_jti = false + @verify_aud = false + @verify_sub = false + @leeway = 0 + @algorithms = ['HS256'] + @required_claims = [] + end + + # @api private + def to_h + { + verify_expiration: verify_expiration, + verify_not_before: verify_not_before, + verify_iss: verify_iss, + verify_iat: verify_iat, + verify_jti: verify_jti, + verify_aud: verify_aud, + verify_sub: verify_sub, + leeway: leeway, + algorithms: algorithms, + required_claims: required_claims + } + end + end + end +end diff --git a/vendor/bundle/ruby/3.2.0/gems/jwt-3.1.2/lib/jwt/configuration/jwk_configuration.rb b/vendor/bundle/ruby/3.2.0/gems/jwt-3.1.2/lib/jwt/configuration/jwk_configuration.rb new file mode 100644 index 000000000..f1373bcee --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/jwt-3.1.2/lib/jwt/configuration/jwk_configuration.rb @@ -0,0 +1,28 @@ +# frozen_string_literal: true + +require_relative '../jwk/kid_as_key_digest' +require_relative '../jwk/thumbprint' + +module JWT + module Configuration + # @api private + class JwkConfiguration + def initialize + self.kid_generator_type = :key_digest + end + + def kid_generator_type=(value) + self.kid_generator = case value + when :key_digest + JWT::JWK::KidAsKeyDigest + when :rfc7638_thumbprint + JWT::JWK::Thumbprint + else + raise ArgumentError, "#{value} is not a valid kid generator type." 
+ end + end + + attr_accessor :kid_generator + end + end +end diff --git a/vendor/bundle/ruby/3.2.0/gems/jwt-3.1.2/lib/jwt/decode.rb b/vendor/bundle/ruby/3.2.0/gems/jwt-3.1.2/lib/jwt/decode.rb new file mode 100644 index 000000000..9a8a0a60b --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/jwt-3.1.2/lib/jwt/decode.rb @@ -0,0 +1,123 @@ +# frozen_string_literal: true + +require 'json' +require 'jwt/x5c_key_finder' + +module JWT + # The Decode class is responsible for decoding and verifying JWT tokens. + class Decode + # Order is very important - first check for string keys, next for symbols + ALGORITHM_KEYS = ['algorithm', + :algorithm, + 'algorithms', + :algorithms].freeze + # Initializes a new Decode instance. + # + # @param jwt [String] the JWT to decode. + # @param key [String, Array] the key(s) to use for verification. + # @param verify [Boolean] whether to verify the token's signature. + # @param options [Hash] additional options for decoding and verification. + # @param keyfinder [Proc] an optional key finder block to dynamically find the key for verification. + # @raise [JWT::DecodeError] if decoding or verification fails. + def initialize(jwt, key, verify, options, &keyfinder) + raise JWT::DecodeError, 'Nil JSON web token' unless jwt + + @token = EncodedToken.new(jwt) + @key = key + @options = options + @verify = verify + @keyfinder = keyfinder + end + + # Decodes the JWT token and verifies its segments if verification is enabled. + # + # @return [Array] an array containing the decoded payload and header. + def decode_segments + validate_segment_count! + if @verify + verify_algo + set_key + verify_signature + Claims::DecodeVerifier.verify!(token.unverified_payload, @options) + end + + [token.unverified_payload, token.header] + end + + private + + attr_reader :token + + def verify_signature + return if none_algorithm? + + raise JWT::DecodeError, 'No verification key available' unless @key + + token.verify_signature!(algorithm: allowed_and_valid_algorithms, key: @key) + end + + def verify_algo + raise JWT::IncorrectAlgorithm, 'An algorithm must be specified' if allowed_algorithms.empty? + raise JWT::DecodeError, 'Token header not a JSON object' unless token.header.is_a?(Hash) + raise JWT::IncorrectAlgorithm, 'Token is missing alg header' unless alg_in_header + raise JWT::IncorrectAlgorithm, 'Expected a different algorithm' if allowed_and_valid_algorithms.empty? + end + + def set_key + @key = find_key(&@keyfinder) if @keyfinder + if @options[:jwks] + @key = ::JWT::JWK::KeyFinder.new( + jwks: @options[:jwks], + allow_nil_kid: @options[:allow_nil_kid], + key_fields: @options[:key_fields] + ).call(token) + end + + return unless (x5c_options = @options[:x5c]) + + @key = X5cKeyFinder.new(x5c_options[:root_certificates], x5c_options[:crls]).from(token.header['x5c']) + end + + def allowed_and_valid_algorithms + @allowed_and_valid_algorithms ||= allowed_algorithms.select { |alg| alg.valid_alg?(alg_in_header) } + end + + def given_algorithms + alg_key = ALGORITHM_KEYS.find { |key| @options[key] } + Array(@options[alg_key]) + end + + def allowed_algorithms + @allowed_algorithms ||= resolve_allowed_algorithms + end + + def resolve_allowed_algorithms + given_algorithms.map { |alg| JWA.resolve(alg) } + end + + def find_key(&keyfinder) + key = (keyfinder.arity == 2 ? yield(token.header, token.unverified_payload) : yield(token.header)) + # key can be of type [string, nil, OpenSSL::PKey, Array] + return key if key && !Array(key).empty? 
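+      # Fall through: the keyfinder did not return a usable key.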
+ + raise JWT::DecodeError, 'No verification key available' + end + + def validate_segment_count! + segment_count = token.jwt.count('.') + 1 + return if segment_count == 3 + return if !@verify && segment_count == 2 # If no verifying required, the signature is not needed + return if segment_count == 2 && none_algorithm? + + raise JWT::DecodeError, 'Not enough or too many segments' + end + + def none_algorithm? + alg_in_header == 'none' + end + + def alg_in_header + token.header['alg'] + end + end +end diff --git a/vendor/bundle/ruby/3.2.0/gems/jwt-3.1.2/lib/jwt/encode.rb b/vendor/bundle/ruby/3.2.0/gems/jwt-3.1.2/lib/jwt/encode.rb new file mode 100644 index 000000000..32164e09e --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/jwt-3.1.2/lib/jwt/encode.rb @@ -0,0 +1,30 @@ +# frozen_string_literal: true + +require_relative 'jwa' + +module JWT + # The Encode class is responsible for encoding JWT tokens. + class Encode + # Initializes a new Encode instance. + # + # @param options [Hash] the options for encoding the JWT token. + # @option options [Hash] :payload the payload of the JWT token. + # @option options [Hash] :headers the headers of the JWT token. + # @option options [String] :key the key used to sign the JWT token. + # @option options [String] :algorithm the algorithm used to sign the JWT token. + def initialize(options) + @token = Token.new(payload: options[:payload], header: options[:headers]) + @key = options[:key] + @algorithm = options[:algorithm] + end + + # Encodes the JWT token and returns its segments. + # + # @return [String] the encoded JWT token. + def segments + @token.verify_claims!(:numeric) + @token.sign!(algorithm: @algorithm, key: @key) + @token.jwt + end + end +end diff --git a/vendor/bundle/ruby/3.2.0/gems/jwt-3.1.2/lib/jwt/encoded_token.rb b/vendor/bundle/ruby/3.2.0/gems/jwt-3.1.2/lib/jwt/encoded_token.rb new file mode 100644 index 000000000..cbaec1c8d --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/jwt-3.1.2/lib/jwt/encoded_token.rb @@ -0,0 +1,236 @@ +# frozen_string_literal: true + +module JWT + # Represents an encoded JWT token + # + # Processing an encoded and signed token: + # + # token = JWT::Token.new(payload: {pay: 'load'}) + # token.sign!(algorithm: 'HS256', key: 'secret') + # + # encoded_token = JWT::EncodedToken.new(token.jwt) + # encoded_token.verify_signature!(algorithm: 'HS256', key: 'secret') + # encoded_token.payload # => {'pay' => 'load'} + class EncodedToken + # @private + # Allow access to the unverified payload for claim verification. + class ClaimsContext + extend Forwardable + + def_delegators :@token, :header, :unverified_payload + + def initialize(token) + @token = token + end + + def payload + unverified_payload + end + end + + DEFAULT_CLAIMS = [:exp].freeze + + private_constant(:DEFAULT_CLAIMS) + + # Returns the original token provided to the class. + # @return [String] The JWT token. + attr_reader :jwt + + # Initializes a new EncodedToken instance. + # + # @param jwt [String] the encoded JWT token. + # @raise [ArgumentError] if the provided JWT is not a String. + def initialize(jwt) + raise ArgumentError, 'Provided JWT must be a String' unless jwt.is_a?(String) + + @jwt = jwt + @signature_verified = false + @claims_verified = false + + @encoded_header, @encoded_payload, @encoded_signature = jwt.split('.') + end + + # Returns the decoded signature of the JWT token. + # + # @return [String] the decoded signature. 
+ def signature + @signature ||= ::JWT::Base64.url_decode(encoded_signature || '') + end + + # Returns the encoded signature of the JWT token. + # + # @return [String] the encoded signature. + attr_reader :encoded_signature + + # Returns the decoded header of the JWT token. + # + # @return [Hash] the header. + def header + @header ||= parse_and_decode(@encoded_header) + end + + # Returns the encoded header of the JWT token. + # + # @return [String] the encoded header. + attr_reader :encoded_header + + # Returns the payload of the JWT token. Access requires the signature and claims to have been verified. + # + # @return [Hash] the payload. + # @raise [JWT::DecodeError] if the signature has not been verified. + def payload + raise JWT::DecodeError, 'Verify the token signature before accessing the payload' unless @signature_verified + raise JWT::DecodeError, 'Verify the token claims before accessing the payload' unless @claims_verified + + decoded_payload + end + + # Returns the payload of the JWT token without requiring the signature to have been verified. + # @return [Hash] the payload. + def unverified_payload + decoded_payload + end + + # Sets or returns the encoded payload of the JWT token. + # + # @return [String] the encoded payload. + attr_accessor :encoded_payload + + # Returns the signing input of the JWT token. + # + # @return [String] the signing input. + def signing_input + [encoded_header, encoded_payload].join('.') + end + + # Verifies the token signature and claims. + # By default it verifies the 'exp' claim. + # + # @example + # encoded_token.verify!(signature: { algorithm: 'HS256', key: 'secret' }, claims: [:exp]) + # + # @param signature [Hash] the parameters for signature verification (see {#verify_signature!}). + # @param claims [Array, Hash] the claims to verify (see {#verify_claims!}). + # @return [nil] + # @raise [JWT::DecodeError] if the signature or claim verification fails. + def verify!(signature:, claims: nil) + verify_signature!(**signature) + claims.is_a?(Array) ? verify_claims!(*claims) : verify_claims!(claims) + nil + end + + # Verifies the token signature and claims. + # By default it verifies the 'exp' claim. + + # @param signature [Hash] the parameters for signature verification (see {#verify_signature!}). + # @param claims [Array, Hash] the claims to verify (see {#verify_claims!}). + # @return [Boolean] true if the signature and claims are valid, false otherwise. + def valid?(signature:, claims: nil) + valid_signature?(**signature) && + (claims.is_a?(Array) ? valid_claims?(*claims) : valid_claims?(claims)) + end + + # Verifies the signature of the JWT token. + # + # @param algorithm [String, Array, Object, Array] the algorithm(s) to use for verification. + # @param key [String, Array] the key(s) to use for verification. + # @param key_finder [#call] an object responding to `call` to find the key for verification. + # @return [nil] + # @raise [JWT::VerificationError] if the signature verification fails. + # @raise [ArgumentError] if neither key nor key_finder is provided, or if both are provided. + def verify_signature!(algorithm:, key: nil, key_finder: nil) + return if valid_signature?(algorithm: algorithm, key: key, key_finder: key_finder) + + raise JWT::VerificationError, 'Signature verification failed' + end + + # Checks if the signature of the JWT token is valid. + # + # @param algorithm [String, Array, Object, Array] the algorithm(s) to use for verification. + # @param key [String, Array, JWT::JWK::KeyBase, Array] the key(s) to use for verification. 
+ # @param key_finder [#call] an object responding to `call` to find the key for verification. + # @return [Boolean] true if the signature is valid, false otherwise. + def valid_signature?(algorithm: nil, key: nil, key_finder: nil) + raise ArgumentError, 'Provide either key or key_finder, not both or neither' if key.nil? == key_finder.nil? + + keys = Array(key || key_finder.call(self)) + verifiers = JWA.create_verifiers(algorithms: algorithm, keys: keys, preferred_algorithm: header['alg']) + + raise JWT::VerificationError, 'No algorithm provided' if verifiers.empty? + + valid = verifiers.any? do |jwa| + jwa.verify(data: signing_input, signature: signature) + end + valid.tap { |verified| @signature_verified = verified } + end + + # Verifies the claims of the token. + # @param options [Array, Hash] the claims to verify. By default, it checks the 'exp' claim. + # @raise [JWT::DecodeError] if the claims are invalid. + def verify_claims!(*options) + Claims::Verifier.verify!(ClaimsContext.new(self), *claims_options(options)).tap do + @claims_verified = true + end + rescue StandardError + @claims_verified = false + raise + end + + # Returns the errors of the claims of the token. + # @param options [Array, Hash] the claims to verify. By default, it checks the 'exp' claim. + # @return [Array] the errors of the claims. + def claim_errors(*options) + Claims::Verifier.errors(ClaimsContext.new(self), *claims_options(options)) + end + + # Returns whether the claims of the token are valid. + # @param options [Array, Hash] the claims to verify. By default, it checks the 'exp' claim. + # @return [Boolean] whether the claims are valid. + def valid_claims?(*options) + claim_errors(*claims_options(options)).empty?.tap { |verified| @claims_verified = verified } + end + + alias to_s jwt + + private + + def claims_options(options) + return DEFAULT_CLAIMS if options.first.nil? + + options + end + + def decode_payload + raise JWT::DecodeError, 'Encoded payload is empty' if encoded_payload == '' + + if unencoded_payload? + verify_claims!(crit: ['b64']) + return parse_unencoded(encoded_payload) + end + + parse_and_decode(encoded_payload) + end + + def unencoded_payload? + header['b64'] == false + end + + def parse_and_decode(segment) + parse(::JWT::Base64.url_decode(segment || '')) + end + + def parse_unencoded(segment) + parse(segment) + end + + def parse(segment) + JWT::JSON.parse(segment) + rescue ::JSON::ParserError + raise JWT::DecodeError, 'Invalid segment encoding' + end + + def decoded_payload + @decoded_payload ||= decode_payload + end + end +end diff --git a/vendor/bundle/ruby/3.2.0/gems/jwt-3.1.2/lib/jwt/error.rb b/vendor/bundle/ruby/3.2.0/gems/jwt-3.1.2/lib/jwt/error.rb new file mode 100644 index 000000000..2a0f8a2ce --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/jwt-3.1.2/lib/jwt/error.rb @@ -0,0 +1,54 @@ +# frozen_string_literal: true + +module JWT + # The EncodeError class is raised when there is an error encoding a JWT. + class EncodeError < StandardError; end + + # The DecodeError class is raised when there is an error decoding a JWT. + class DecodeError < StandardError; end + + # The VerificationError class is raised when there is an error verifying a JWT. + class VerificationError < DecodeError; end + + # The ExpiredSignature class is raised when the JWT signature has expired. + class ExpiredSignature < DecodeError; end + + # The IncorrectAlgorithm class is raised when the JWT algorithm is incorrect. 
+ class IncorrectAlgorithm < DecodeError; end + + # The ImmatureSignature class is raised when the JWT signature is immature. + class ImmatureSignature < DecodeError; end + + # The InvalidIssuerError class is raised when the JWT issuer is invalid. + class InvalidIssuerError < DecodeError; end + + # The UnsupportedEcdsaCurve class is raised when the ECDSA curve is unsupported. + class UnsupportedEcdsaCurve < IncorrectAlgorithm; end + + # The InvalidIatError class is raised when the JWT issued at (iat) claim is invalid. + class InvalidIatError < DecodeError; end + + # The InvalidAudError class is raised when the JWT audience (aud) claim is invalid. + class InvalidAudError < DecodeError; end + + # The InvalidSubError class is raised when the JWT subject (sub) claim is invalid. + class InvalidSubError < DecodeError; end + + # The InvalidCritError class is raised when the JWT crit header is invalid. + class InvalidCritError < DecodeError; end + + # The InvalidJtiError class is raised when the JWT ID (jti) claim is invalid. + class InvalidJtiError < DecodeError; end + + # The InvalidPayload class is raised when the JWT payload is invalid. + class InvalidPayload < DecodeError; end + + # The MissingRequiredClaim class is raised when a required claim is missing from the JWT. + class MissingRequiredClaim < DecodeError; end + + # The Base64DecodeError class is raised when there is an error decoding a Base64-encoded string. + class Base64DecodeError < DecodeError; end + + # The JWKError class is raised when there is an error with the JSON Web Key (JWK). + class JWKError < DecodeError; end +end diff --git a/vendor/bundle/ruby/3.2.0/gems/jwt-3.1.2/lib/jwt/json.rb b/vendor/bundle/ruby/3.2.0/gems/jwt-3.1.2/lib/jwt/json.rb new file mode 100644 index 000000000..90ae45855 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/jwt-3.1.2/lib/jwt/json.rb @@ -0,0 +1,18 @@ +# frozen_string_literal: true + +require 'json' + +module JWT + # @api private + class JSON + class << self + def generate(data) + ::JSON.generate(data) + end + + def parse(data) + ::JSON.parse(data) + end + end + end +end diff --git a/vendor/bundle/ruby/3.2.0/gems/jwt-3.1.2/lib/jwt/jwa.rb b/vendor/bundle/ruby/3.2.0/gems/jwt-3.1.2/lib/jwt/jwa.rb new file mode 100644 index 000000000..11f23c4b3 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/jwt-3.1.2/lib/jwt/jwa.rb @@ -0,0 +1,103 @@ +# frozen_string_literal: true + +require 'openssl' + +require_relative 'jwa/signing_algorithm' +require_relative 'jwa/ecdsa' +require_relative 'jwa/hmac' +require_relative 'jwa/none' +require_relative 'jwa/ps' +require_relative 'jwa/rsa' +require_relative 'jwa/unsupported' + +module JWT + # The JWA module contains all supported algorithms. + module JWA + # @api private + class VerifierContext + attr_reader :jwa + + def initialize(jwa:, keys:) + @jwa = jwa + @keys = Array(keys) + end + + def verify(*args, **kwargs) + @keys.any? do |key| + @jwa.verify(*args, **kwargs, verification_key: key) + end + end + end + + # @api private + class SignerContext + attr_reader :jwa + + def initialize(jwa:, key:) + @jwa = jwa + @key = key + end + + def sign(*args, **kwargs) + @jwa.sign(*args, **kwargs, signing_key: @key) + end + end + + class << self + # @api private + def resolve(algorithm) + return find(algorithm) if algorithm.is_a?(String) || algorithm.is_a?(Symbol) + + raise ArgumentError, 'Algorithm must be provided' if algorithm.nil? 
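+        # Anything other than a String or Symbol is treated as a custom algorithm object.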
+ + raise ArgumentError, 'Custom algorithms are required to include JWT::JWA::SigningAlgorithm' unless algorithm.is_a?(SigningAlgorithm) + + algorithm + end + + # @api private + def resolve_and_sort(algorithms:, preferred_algorithm:) + Array(algorithms).map { |alg| JWA.resolve(alg) } + .partition { |alg| alg.valid_alg?(preferred_algorithm) } + .flatten + end + + # @api private + def create_signer(algorithm:, key:) + if key.is_a?(JWK::KeyBase) + validate_jwk_algorithms!(key, algorithm, DecodeError) + + return key + end + + SignerContext.new(jwa: resolve(algorithm), key: key) + end + + # @api private + def create_verifiers(algorithms:, keys:, preferred_algorithm:) + jwks, other_keys = keys.partition { |key| key.is_a?(JWK::KeyBase) } + + validate_jwk_algorithms!(jwks, algorithms, VerificationError) + + jwks + resolve_and_sort(algorithms: algorithms, + preferred_algorithm: preferred_algorithm) + .map { |jwa| VerifierContext.new(jwa: jwa, keys: other_keys) } + end + + # @api private + def validate_jwk_algorithms!(jwks, algorithms, error_class) + algorithms = Array(algorithms) + + return if algorithms.empty? + + return if Array(jwks).all? do |jwk| + algorithms.any? do |alg| + jwk.jwa.valid_alg?(alg) + end + end + + raise error_class, "Provided JWKs do not support one of the specified algorithms: #{algorithms.join(', ')}" + end + end + end +end diff --git a/vendor/bundle/ruby/3.2.0/gems/jwt-3.1.2/lib/jwt/jwa/ecdsa.rb b/vendor/bundle/ruby/3.2.0/gems/jwt-3.1.2/lib/jwt/jwa/ecdsa.rb new file mode 100644 index 000000000..9840621f7 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/jwt-3.1.2/lib/jwt/jwa/ecdsa.rb @@ -0,0 +1,111 @@ +# frozen_string_literal: true + +module JWT + module JWA + # ECDSA signing algorithm + class Ecdsa + include JWT::JWA::SigningAlgorithm + + def initialize(alg, digest) + @alg = alg + @digest = digest + end + + def sign(data:, signing_key:) + raise_sign_error!("The given key is a #{signing_key.class}. It has to be an OpenSSL::PKey::EC instance") unless signing_key.is_a?(::OpenSSL::PKey::EC) + raise_sign_error!('The given key is not a private key') unless signing_key.private? + + curve_definition = curve_by_name(signing_key.group.curve_name) + key_algorithm = curve_definition[:algorithm] + + raise IncorrectAlgorithm, "payload algorithm is #{alg} but #{key_algorithm} signing key was provided" if alg != key_algorithm + + asn1_to_raw(signing_key.dsa_sign_asn1(OpenSSL::Digest.new(digest).digest(data)), signing_key) + end + + def verify(data:, signature:, verification_key:) + verification_key = self.class.create_public_key_from_point(verification_key) if verification_key.is_a?(::OpenSSL::PKey::EC::Point) + + raise_verify_error!("The given key is a #{verification_key.class}. 
It has to be an OpenSSL::PKey::EC instance") unless verification_key.is_a?(::OpenSSL::PKey::EC) + + curve_definition = curve_by_name(verification_key.group.curve_name) + key_algorithm = curve_definition[:algorithm] + raise IncorrectAlgorithm, "payload algorithm is #{alg} but #{key_algorithm} verification key was provided" if alg != key_algorithm + + verification_key.dsa_verify_asn1(OpenSSL::Digest.new(digest).digest(data), raw_to_asn1(signature, verification_key)) + rescue OpenSSL::PKey::PKeyError + raise JWT::VerificationError, 'Signature verification raised' + end + + NAMED_CURVES = { + 'prime256v1' => { + algorithm: 'ES256', + digest: 'sha256' + }, + 'secp256r1' => { # alias for prime256v1 + algorithm: 'ES256', + digest: 'sha256' + }, + 'secp384r1' => { + algorithm: 'ES384', + digest: 'sha384' + }, + 'secp521r1' => { + algorithm: 'ES512', + digest: 'sha512' + }, + 'secp256k1' => { + algorithm: 'ES256K', + digest: 'sha256' + } + }.freeze + + NAMED_CURVES.each_value do |v| + register_algorithm(new(v[:algorithm], v[:digest])) + end + + # @api private + def self.curve_by_name(name) + NAMED_CURVES.fetch(name) do + raise UnsupportedEcdsaCurve, "The ECDSA curve '#{name}' is not supported" + end + end + + if ::JWT.openssl_3? + def self.create_public_key_from_point(point) + sequence = OpenSSL::ASN1::Sequence([ + OpenSSL::ASN1::Sequence([OpenSSL::ASN1::ObjectId('id-ecPublicKey'), OpenSSL::ASN1::ObjectId(point.group.curve_name)]), + OpenSSL::ASN1::BitString(point.to_octet_string(:uncompressed)) + ]) + OpenSSL::PKey::EC.new(sequence.to_der) + end + else + def self.create_public_key_from_point(point) + OpenSSL::PKey::EC.new(point.group.curve_name).tap do |key| + key.public_key = point + end + end + end + + private + + attr_reader :digest + + def curve_by_name(name) + self.class.curve_by_name(name) + end + + def raw_to_asn1(signature, private_key) + byte_size = (private_key.group.degree + 7) / 8 + sig_bytes = signature[0..(byte_size - 1)] + sig_char = signature[byte_size..-1] || '' + OpenSSL::ASN1::Sequence.new([sig_bytes, sig_char].map { |int| OpenSSL::ASN1::Integer.new(OpenSSL::BN.new(int, 2)) }).to_der + end + + def asn1_to_raw(signature, public_key) + byte_size = (public_key.group.degree + 7) / 8 + OpenSSL::ASN1.decode(signature).value.map { |value| value.value.to_s(2).rjust(byte_size, "\x00") }.join + end + end + end +end diff --git a/vendor/bundle/ruby/3.2.0/gems/jwt-3.1.2/lib/jwt/jwa/hmac.rb b/vendor/bundle/ruby/3.2.0/gems/jwt-3.1.2/lib/jwt/jwa/hmac.rb new file mode 100644 index 000000000..86b3278cd --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/jwt-3.1.2/lib/jwt/jwa/hmac.rb @@ -0,0 +1,78 @@ +# frozen_string_literal: true + +module JWT + module JWA + # Implementation of the HMAC family of algorithms + class Hmac + include JWT::JWA::SigningAlgorithm + + def initialize(alg, digest) + @alg = alg + @digest = digest + end + + def sign(data:, signing_key:) + signing_key ||= '' + raise_verify_error!('HMAC key expected to be a String') unless signing_key.is_a?(String) + + OpenSSL::HMAC.digest(digest.new, signing_key, data) + rescue OpenSSL::HMACError => e + raise_verify_error!('OpenSSL 3.0 does not support nil or empty hmac_secret') if signing_key == '' && e.message == 'EVP_PKEY_new_mac_key: malloc failure' + + raise e + end + + def verify(data:, signature:, verification_key:) + SecurityUtils.secure_compare(signature, sign(data: data, signing_key: verification_key)) + end + + register_algorithm(new('HS256', OpenSSL::Digest::SHA256)) + register_algorithm(new('HS384', OpenSSL::Digest::SHA384)) + 
register_algorithm(new('HS512', OpenSSL::Digest::SHA512)) + + private + + attr_reader :digest + + # Copy of https://github.com/rails/rails/blob/v7.0.3.1/activesupport/lib/active_support/security_utils.rb + # rubocop:disable Naming/MethodParameterName, Style/StringLiterals, Style/NumericPredicate + module SecurityUtils + # Constant time string comparison, for fixed length strings. + # + # The values compared should be of fixed length, such as strings + # that have already been processed by HMAC. Raises in case of length mismatch. + + if defined?(OpenSSL.fixed_length_secure_compare) + def fixed_length_secure_compare(a, b) + OpenSSL.fixed_length_secure_compare(a, b) + end + else + # :nocov: + def fixed_length_secure_compare(a, b) + raise ArgumentError, "string length mismatch." unless a.bytesize == b.bytesize + + l = a.unpack "C#{a.bytesize}" + + res = 0 + b.each_byte { |byte| res |= byte ^ l.shift } + res == 0 + end + # :nocov: + end + module_function :fixed_length_secure_compare + + # Secure string comparison for strings of variable length. + # + # While a timing attack would not be able to discern the content of + # a secret compared via secure_compare, it is possible to determine + # the secret length. This should be considered when using secure_compare + # to compare weak, short secrets to user input. + def secure_compare(a, b) + a.bytesize == b.bytesize && fixed_length_secure_compare(a, b) + end + module_function :secure_compare + end + # rubocop:enable Naming/MethodParameterName, Style/StringLiterals, Style/NumericPredicate + end + end +end diff --git a/vendor/bundle/ruby/3.2.0/gems/jwt-3.1.2/lib/jwt/jwa/none.rb b/vendor/bundle/ruby/3.2.0/gems/jwt-3.1.2/lib/jwt/jwa/none.rb new file mode 100644 index 000000000..ddac94956 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/jwt-3.1.2/lib/jwt/jwa/none.rb @@ -0,0 +1,24 @@ +# frozen_string_literal: true + +module JWT + module JWA + # Implementation of the none algorithm + class None + include JWT::JWA::SigningAlgorithm + + def initialize + @alg = 'none' + end + + def sign(*) + '' + end + + def verify(*) + true + end + + register_algorithm(new) + end + end +end diff --git a/vendor/bundle/ruby/3.2.0/gems/jwt-3.1.2/lib/jwt/jwa/ps.rb b/vendor/bundle/ruby/3.2.0/gems/jwt-3.1.2/lib/jwt/jwa/ps.rb new file mode 100644 index 000000000..85ef615a5 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/jwt-3.1.2/lib/jwt/jwa/ps.rb @@ -0,0 +1,36 @@ +# frozen_string_literal: true + +module JWT + module JWA + # Implementation of the RSASSA-PSS family of algorithms + class Ps + include JWT::JWA::SigningAlgorithm + + def initialize(alg) + @alg = alg + @digest_algorithm = alg.sub('PS', 'sha') + end + + def sign(data:, signing_key:) + raise_sign_error!("The given key is a #{signing_key.class}. 
It has to be an OpenSSL::PKey::RSA instance.") unless signing_key.is_a?(::OpenSSL::PKey::RSA) + raise_sign_error!('The key length must be greater than or equal to 2048 bits') if signing_key.n.num_bits < 2048 + + signing_key.sign_pss(digest_algorithm, data, salt_length: :digest, mgf1_hash: digest_algorithm) + end + + def verify(data:, signature:, verification_key:) + verification_key.verify_pss(digest_algorithm, signature, data, salt_length: :auto, mgf1_hash: digest_algorithm) + rescue OpenSSL::PKey::PKeyError + raise JWT::VerificationError, 'Signature verification raised' + end + + register_algorithm(new('PS256')) + register_algorithm(new('PS384')) + register_algorithm(new('PS512')) + + private + + attr_reader :digest_algorithm + end + end +end diff --git a/vendor/bundle/ruby/3.2.0/gems/jwt-3.1.2/lib/jwt/jwa/rsa.rb b/vendor/bundle/ruby/3.2.0/gems/jwt-3.1.2/lib/jwt/jwa/rsa.rb new file mode 100644 index 000000000..d25b57646 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/jwt-3.1.2/lib/jwt/jwa/rsa.rb @@ -0,0 +1,36 @@ +# frozen_string_literal: true + +module JWT + module JWA + # Implementation of the RSA family of algorithms + class Rsa + include JWT::JWA::SigningAlgorithm + + def initialize(alg) + @alg = alg + @digest = alg.sub('RS', 'SHA') + end + + def sign(data:, signing_key:) + raise_sign_error!("The given key is a #{signing_key.class}. It has to be an OpenSSL::PKey::RSA instance") unless signing_key.is_a?(OpenSSL::PKey::RSA) + raise_sign_error!('The key length must be greater than or equal to 2048 bits') if signing_key.n.num_bits < 2048 + + signing_key.sign(OpenSSL::Digest.new(digest), data) + end + + def verify(data:, signature:, verification_key:) + verification_key.verify(OpenSSL::Digest.new(digest), signature, data) + rescue OpenSSL::PKey::PKeyError + raise JWT::VerificationError, 'Signature verification raised' + end + + register_algorithm(new('RS256')) + register_algorithm(new('RS384')) + register_algorithm(new('RS512')) + + private + + attr_reader :digest + end + end +end diff --git a/vendor/bundle/ruby/3.2.0/gems/jwt-3.1.2/lib/jwt/jwa/signing_algorithm.rb b/vendor/bundle/ruby/3.2.0/gems/jwt-3.1.2/lib/jwt/jwa/signing_algorithm.rb new file mode 100644 index 000000000..b4590a8b0 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/jwt-3.1.2/lib/jwt/jwa/signing_algorithm.rb @@ -0,0 +1,62 @@ +# frozen_string_literal: true + +module JWT + # JSON Web Algorithms + module JWA + # Base functionality for signing algorithms + module SigningAlgorithm + # Class methods for the SigningAlgorithm module + module ClassMethods + def register_algorithm(algo) + ::JWT::JWA.register_algorithm(algo) + end + end + + def self.included(klass) + klass.extend(ClassMethods) + end + + attr_reader :alg + + def valid_alg?(alg_to_check) + alg&.casecmp(alg_to_check)&.zero? 
== true + end + + def header(*) + { 'alg' => alg } + end + + def sign(*) + raise_sign_error!('Algorithm implementation is missing the sign method') + end + + def verify(*) + raise_verify_error!('Algorithm implementation is missing the verify method') + end + + def raise_verify_error!(message) + raise(DecodeError.new(message).tap { |e| e.set_backtrace(caller(1)) }) + end + + def raise_sign_error!(message) + raise(EncodeError.new(message).tap { |e| e.set_backtrace(caller(1)) }) + end + end + + class << self + def register_algorithm(algo) + algorithms[algo.alg.to_s.downcase] = algo + end + + def find(algo) + algorithms.fetch(algo.to_s.downcase, Unsupported) + end + + private + + def algorithms + @algorithms ||= {} + end + end + end +end diff --git a/vendor/bundle/ruby/3.2.0/gems/jwt-3.1.2/lib/jwt/jwa/unsupported.rb b/vendor/bundle/ruby/3.2.0/gems/jwt-3.1.2/lib/jwt/jwa/unsupported.rb new file mode 100644 index 000000000..beb4be1f4 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/jwt-3.1.2/lib/jwt/jwa/unsupported.rb @@ -0,0 +1,20 @@ +# frozen_string_literal: true + +module JWT + module JWA + # Represents an unsupported algorithm + module Unsupported + class << self + include JWT::JWA::SigningAlgorithm + + def sign(*) + raise_sign_error!('Unsupported signing method') + end + + def verify(*) + raise JWT::VerificationError, 'Algorithm not supported' + end + end + end + end +end diff --git a/vendor/bundle/ruby/3.2.0/gems/jwt-3.1.2/lib/jwt/jwk.rb b/vendor/bundle/ruby/3.2.0/gems/jwt-3.1.2/lib/jwt/jwk.rb new file mode 100644 index 000000000..d717bac78 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/jwt-3.1.2/lib/jwt/jwk.rb @@ -0,0 +1,55 @@ +# frozen_string_literal: true + +require_relative 'jwk/key_finder' +require_relative 'jwk/set' + +module JWT + # JSON Web Key (JWK) + module JWK + class << self + def create_from(key, params = nil, options = {}) + if key.is_a?(Hash) + jwk_kty = key[:kty] || key['kty'] + raise JWT::JWKError, 'Key type (kty) not provided' unless jwk_kty + + return mappings.fetch(jwk_kty.to_s) do |kty| + raise JWT::JWKError, "Key type #{kty} not supported" + end.new(key, params, options) + end + + mappings.fetch(key.class) do |klass| + raise JWT::JWKError, "Cannot create JWK from a #{klass.name}" + end.new(key, params, options) + end + + def classes + @mappings = nil # reset the cached mappings + @classes ||= [] + end + + alias new create_from + alias import create_from + + private + + def mappings + @mappings ||= generate_mappings + end + + def generate_mappings + classes.each_with_object({}) do |klass, hash| + next unless klass.const_defined?('KTYS') + + Array(klass::KTYS).each do |kty| + hash[kty] = klass + end + end + end + end + end +end + +require_relative 'jwk/key_base' +require_relative 'jwk/ec' +require_relative 'jwk/rsa' +require_relative 'jwk/hmac' diff --git a/vendor/bundle/ruby/3.2.0/gems/jwt-3.1.2/lib/jwt/jwk/ec.rb b/vendor/bundle/ruby/3.2.0/gems/jwt-3.1.2/lib/jwt/jwk/ec.rb new file mode 100644 index 000000000..e240aa049 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/jwt-3.1.2/lib/jwt/jwk/ec.rb @@ -0,0 +1,240 @@ +# frozen_string_literal: true + +require 'forwardable' + +module JWT + module JWK + # JWK representation for Elliptic Curve (EC) keys + class EC < KeyBase # rubocop:disable Metrics/ClassLength + KTY = 'EC' + KTYS = [KTY, OpenSSL::PKey::EC, JWT::JWK::EC].freeze + BINARY = 2 + EC_PUBLIC_KEY_ELEMENTS = %i[kty crv x y].freeze + EC_PRIVATE_KEY_ELEMENTS = %i[d].freeze + EC_KEY_ELEMENTS = (EC_PRIVATE_KEY_ELEMENTS + EC_PUBLIC_KEY_ELEMENTS).freeze + ZERO_BYTE = 
"\0".b.freeze + + def initialize(key, params = nil, options = {}) + params ||= {} + + # For backwards compatibility when kid was a String + params = { kid: params } if params.is_a?(String) + + key_params = extract_key_params(key) + + params = params.transform_keys(&:to_sym) + check_jwk_params!(key_params, params) + + super(options, key_params.merge(params)) + end + + def keypair + ec_key + end + + def private? + ec_key.private_key? + end + + def signing_key + ec_key + end + + def verify_key + ec_key + end + + def public_key + ec_key + end + + def members + EC_PUBLIC_KEY_ELEMENTS.each_with_object({}) { |i, h| h[i] = self[i] } + end + + def export(options = {}) + exported = parameters.clone + exported.reject! { |k, _| EC_PRIVATE_KEY_ELEMENTS.include? k } unless private? && options[:include_private] == true + exported + end + + def key_digest + _crv, x_octets, y_octets = keypair_components(ec_key) + sequence = OpenSSL::ASN1::Sequence([OpenSSL::ASN1::Integer.new(OpenSSL::BN.new(x_octets, BINARY)), + OpenSSL::ASN1::Integer.new(OpenSSL::BN.new(y_octets, BINARY))]) + OpenSSL::Digest::SHA256.hexdigest(sequence.to_der) + end + + def []=(key, value) + raise ArgumentError, 'cannot overwrite cryptographic key attributes' if EC_KEY_ELEMENTS.include?(key.to_sym) + + super + end + + def jwa + return super if self[:alg] + + curve_name = self.class.to_openssl_curve(self[:crv]) + JWA.resolve(JWA::Ecdsa.curve_by_name(curve_name)[:algorithm]) + end + + private + + def ec_key + @ec_key ||= create_ec_key(self[:crv], self[:x], self[:y], self[:d]) + end + + def extract_key_params(key) + case key + when JWT::JWK::EC + key.export(include_private: true) + when OpenSSL::PKey::EC # Accept OpenSSL key as input + @ec_key = key # Preserve the object to avoid recreation + parse_ec_key(key) + when Hash + key.transform_keys(&:to_sym) + else + raise ArgumentError, 'key must be of type OpenSSL::PKey::EC or Hash with key parameters' + end + end + + def check_jwk_params!(key_params, params) + raise ArgumentError, 'cannot overwrite cryptographic key attributes' unless (EC_KEY_ELEMENTS & params.keys).empty? 
+ raise JWT::JWKError, "Incorrect 'kty' value: #{key_params[:kty]}, expected #{KTY}" unless key_params[:kty] == KTY + raise JWT::JWKError, 'Key format is invalid for EC' unless key_params[:crv] && key_params[:x] && key_params[:y] + end + + def keypair_components(ec_keypair) + encoded_point = ec_keypair.public_key.to_bn.to_s(BINARY) + case ec_keypair.group.curve_name + when 'prime256v1' + crv = 'P-256' + x_octets, y_octets = encoded_point.unpack('xa32a32') + when 'secp256k1' + crv = 'P-256K' + x_octets, y_octets = encoded_point.unpack('xa32a32') + when 'secp384r1' + crv = 'P-384' + x_octets, y_octets = encoded_point.unpack('xa48a48') + when 'secp521r1' + crv = 'P-521' + x_octets, y_octets = encoded_point.unpack('xa66a66') + else + raise JWT::JWKError, "Unsupported curve '#{ec_keypair.group.curve_name}'" + end + [crv, x_octets, y_octets] + end + + def encode_octets(octets) + return unless octets + + ::JWT::Base64.url_encode(octets) + end + + def parse_ec_key(key) + crv, x_octets, y_octets = keypair_components(key) + octets = key.private_key&.to_bn&.to_s(BINARY) + { + kty: KTY, + crv: crv, + x: encode_octets(x_octets), + y: encode_octets(y_octets), + d: encode_octets(octets) + }.compact + end + + def create_point(jwk_crv, jwk_x, jwk_y) + curve = EC.to_openssl_curve(jwk_crv) + x_octets = decode_octets(jwk_x) + y_octets = decode_octets(jwk_y) + + # The details of the `Point` instantiation are covered in: + # - https://docs.ruby-lang.org/en/2.4.0/OpenSSL/PKey/EC.html + # - https://www.openssl.org/docs/manmaster/man3/EC_POINT_new.html + # - https://tools.ietf.org/html/rfc5480#section-2.2 + # - https://www.secg.org/SEC1-Ver-1.0.pdf + # Section 2.3.3 of the last of these references specifies that the + # encoding of an uncompressed point consists of the byte `0x04` followed + # by the x value then the y value. + OpenSSL::PKey::EC::Point.new( + OpenSSL::PKey::EC::Group.new(curve), + OpenSSL::BN.new([0x04, x_octets, y_octets].pack('Ca*a*'), 2) + ) + end + + if ::JWT.openssl_3? + def create_ec_key(jwk_crv, jwk_x, jwk_y, jwk_d) + point = create_point(jwk_crv, jwk_x, jwk_y) + + return ::JWT::JWA::Ecdsa.create_public_key_from_point(point) unless jwk_d + + # https://datatracker.ietf.org/doc/html/rfc5915.html + # ECPrivateKey ::= SEQUENCE { + # version INTEGER { ecPrivkeyVer1(1) } (ecPrivkeyVer1), + # privateKey OCTET STRING, + # parameters [0] ECParameters {{ NamedCurve }} OPTIONAL, + # publicKey [1] BIT STRING OPTIONAL + # } + + sequence = OpenSSL::ASN1::Sequence([ + OpenSSL::ASN1::Integer(1), + OpenSSL::ASN1::OctetString(OpenSSL::BN.new(decode_octets(jwk_d), 2).to_s(2)), + OpenSSL::ASN1::ObjectId(point.group.curve_name, 0, :EXPLICIT), + OpenSSL::ASN1::BitString(point.to_octet_string(:uncompressed), 1, :EXPLICIT) + ]) + OpenSSL::PKey::EC.new(sequence.to_der) + end + else + def create_ec_key(jwk_crv, jwk_x, jwk_y, jwk_d) + point = create_point(jwk_crv, jwk_x, jwk_y) + + ::JWT::JWA::Ecdsa.create_public_key_from_point(point).tap do |key| + key.private_key = OpenSSL::BN.new(decode_octets(jwk_d), 2) if jwk_d + end + end + end + + def decode_octets(base64_encoded_coordinate) + bytes = ::JWT::Base64.url_decode(base64_encoded_coordinate) + # Some base64 encoders on some platform omit a single 0-byte at + # the start of either Y or X coordinate of the elliptic curve point. + # This leads to an encoding error when data is passed to OpenSSL BN. + # It is know to have happened to exported JWKs on a Java application and + # on a Flutter/Dart application (both iOS and Android). 
All that is + # needed to fix the problem is adding a leading 0-byte. We know the + # required byte is 0 because with any other byte the point is no longer + # on the curve - and OpenSSL will actually communicate this via another + # exception. The indication of a stripped byte will be the fact that the + # coordinates - once decoded into bytes - should always be an even + # bytesize. For example, with a P-521 curve, both x and y must be 66 bytes. + # With a P-256 curve, both x and y must be 32 and so on. The simplest way + # to check for this truncation is thus to check whether the number of bytes + # is odd, and restore the leading 0-byte if it is. + if bytes.bytesize.odd? + ZERO_BYTE + bytes + else + bytes + end + end + + class << self + def import(jwk_data) + new(jwk_data) + end + + def to_openssl_curve(crv) + # The JWK specs and OpenSSL use different names for the same curves. + # See https://tools.ietf.org/html/rfc5480#section-2.1.1.1 for some + # pointers on different names for common curves. + case crv + when 'P-256' then 'prime256v1' + when 'P-384' then 'secp384r1' + when 'P-521' then 'secp521r1' + when 'P-256K' then 'secp256k1' + else raise JWT::JWKError, 'Invalid curve provided' + end + end + end + end + end +end diff --git a/vendor/bundle/ruby/3.2.0/gems/jwt-3.1.2/lib/jwt/jwk/hmac.rb b/vendor/bundle/ruby/3.2.0/gems/jwt-3.1.2/lib/jwt/jwk/hmac.rb new file mode 100644 index 000000000..6813367ac --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/jwt-3.1.2/lib/jwt/jwk/hmac.rb @@ -0,0 +1,102 @@ +# frozen_string_literal: true + +module JWT + module JWK + # JWK for HMAC keys + class HMAC < KeyBase + KTY = 'oct' + KTYS = [KTY, String, JWT::JWK::HMAC].freeze + HMAC_PUBLIC_KEY_ELEMENTS = %i[kty].freeze + HMAC_PRIVATE_KEY_ELEMENTS = %i[k].freeze + HMAC_KEY_ELEMENTS = (HMAC_PRIVATE_KEY_ELEMENTS + HMAC_PUBLIC_KEY_ELEMENTS).freeze + + def initialize(key, params = nil, options = {}) + params ||= {} + + # For backwards compatibility when kid was a String + params = { kid: params } if params.is_a?(String) + + key_params = extract_key_params(key) + + params = params.transform_keys(&:to_sym) + check_jwk(key_params, params) + + super(options, key_params.merge(params)) + end + + def keypair + secret + end + + def private? + true + end + + def public_key + nil + end + + def verify_key + secret + end + + def signing_key + secret + end + + # See https://tools.ietf.org/html/rfc7517#appendix-A.3 + def export(options = {}) + exported = parameters.clone + exported.reject! { |k, _| HMAC_PRIVATE_KEY_ELEMENTS.include? k } unless private? 
&& options[:include_private] == true + exported + end + + def members + HMAC_KEY_ELEMENTS.each_with_object({}) { |i, h| h[i] = self[i] } + end + + def key_digest + sequence = OpenSSL::ASN1::Sequence([OpenSSL::ASN1::UTF8String.new(signing_key), + OpenSSL::ASN1::UTF8String.new(KTY)]) + OpenSSL::Digest::SHA256.hexdigest(sequence.to_der) + end + + def []=(key, value) + raise ArgumentError, 'cannot overwrite cryptographic key attributes' if HMAC_KEY_ELEMENTS.include?(key.to_sym) + + super + end + + private + + def secret + @secret ||= ::JWT::Base64.url_decode(self[:k]) + end + + def extract_key_params(key) + case key + when JWT::JWK::HMAC + key.export(include_private: true) + when String # Accept String key as input + { kty: KTY, k: ::JWT::Base64.url_encode(key) } + when Hash + key.transform_keys(&:to_sym) + else + raise ArgumentError, 'key must be of type String or Hash with key parameters' + end + end + + def check_jwk(keypair, params) + raise ArgumentError, 'cannot overwrite cryptographic key attributes' unless (HMAC_KEY_ELEMENTS & params.keys).empty? + raise JWT::JWKError, "Incorrect 'kty' value: #{keypair[:kty]}, expected #{KTY}" unless keypair[:kty] == KTY + raise JWT::JWKError, 'Key format is invalid for HMAC' unless keypair[:k] + end + + class << self + def import(jwk_data) + new(jwk_data) + end + end + end + end +end diff --git a/vendor/bundle/ruby/3.2.0/gems/jwt-3.1.2/lib/jwt/jwk/key_base.rb b/vendor/bundle/ruby/3.2.0/gems/jwt-3.1.2/lib/jwt/jwk/key_base.rb new file mode 100644 index 000000000..ac9d9b911 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/jwt-3.1.2/lib/jwt/jwk/key_base.rb @@ -0,0 +1,72 @@ +# frozen_string_literal: true + +module JWT + module JWK + # Base for JWK implementations + class KeyBase + def self.inherited(klass) + super + ::JWT::JWK.classes << klass + end + + def initialize(options, params = {}) + options ||= {} + + @parameters = params.transform_keys(&:to_sym) # Uniform interface + + # For backwards compatibility, kid_generator may be specified in the parameters + options[:kid_generator] ||= @parameters.delete(:kid_generator) + + # Make sure the key has a kid + kid_generator = options[:kid_generator] || ::JWT.configuration.jwk.kid_generator + self[:kid] ||= kid_generator.new(self).generate + end + + def kid + self[:kid] + end + + def hash + self[:kid].hash + end + + def [](key) + @parameters[key.to_sym] + end + + def []=(key, value) + @parameters[key.to_sym] = value + end + + def ==(other) + other.is_a?(::JWT::JWK::KeyBase) && self[:kid] == other[:kid] + end + + def verify(**kwargs) + jwa.verify(**kwargs, verification_key: verify_key) + end + + def sign(**kwargs) + jwa.sign(**kwargs, signing_key: signing_key) + end + + alias eql? 
== + + def <=>(other) + return nil unless other.is_a?(::JWT::JWK::KeyBase) + + self[:kid] <=> other[:kid] + end + + def jwa + raise JWT::JWKError, 'Could not resolve the JWA, the "alg" parameter is missing' unless self[:alg] + + JWA.resolve(self[:alg]).tap do |jwa| + raise JWT::JWKError, 'none algorithm usage not supported via JWK' if jwa.is_a?(JWA::None) + end + end + + attr_reader :parameters + end + end +end diff --git a/vendor/bundle/ruby/3.2.0/gems/jwt-3.1.2/lib/jwt/jwk/key_finder.rb b/vendor/bundle/ruby/3.2.0/gems/jwt-3.1.2/lib/jwt/jwk/key_finder.rb new file mode 100644 index 000000000..c7387841e --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/jwt-3.1.2/lib/jwt/jwk/key_finder.rb @@ -0,0 +1,73 @@ +# frozen_string_literal: true + +module JWT + module JWK + # JSON Web Key keyfinder + # To find the key for a given kid + class KeyFinder + # Initializes a new KeyFinder instance. + # @param [Hash] options the options to create a KeyFinder with + # @option options [Proc, JWT::JWK::Set] :jwks the jwks or a loader proc + # @option options [Boolean] :allow_nil_kid whether to allow nil kid + # @option options [Array] :key_fields the fields to use for key matching, + # the order of the fields are used to determine + # the priority of the keys. + def initialize(options) + @allow_nil_kid = options[:allow_nil_kid] + jwks_or_loader = options[:jwks] + + @jwks_loader = if jwks_or_loader.respond_to?(:call) + jwks_or_loader + else + ->(_options) { jwks_or_loader } + end + + @key_fields = options[:key_fields] || %i[kid] + end + + # Returns the verification key for the given kid + # @param [String] kid the key id + def key_for(kid, key_field = :kid) + raise ::JWT::DecodeError, "Invalid type for #{key_field} header parameter" unless kid.nil? || kid.is_a?(String) + + jwk = resolve_key(kid, key_field) + + raise ::JWT::DecodeError, 'No keys found in jwks' unless @jwks.any? + raise ::JWT::DecodeError, "Could not find public key for kid #{kid}" unless jwk + + jwk.verify_key + end + + # Returns the key for the given token + # @param [JWT::EncodedToken] token the token + def call(token) + @key_fields.each do |key_field| + field_value = token.header[key_field.to_s] + + return key_for(field_value, key_field) if field_value + end + + raise ::JWT::DecodeError, 'No key id (kid) or x5t found from token headers' unless @allow_nil_kid + + kid = token.header['kid'] + key_for(kid) + end + + private + + def resolve_key(kid, key_field) + key_matcher = ->(key) { (kid.nil? 
&& @allow_nil_kid) || key[key_field] == kid } + + # First try without invalidation to facilitate application caching + @jwks ||= JWT::JWK::Set.new(@jwks_loader.call(key_field => kid)) + jwk = @jwks.find { |key| key_matcher.call(key) } + + return jwk if jwk + + # Second try, invalidate for backwards compatibility + @jwks = JWT::JWK::Set.new(@jwks_loader.call(invalidate: true, kid_not_found: true, key_field => kid)) + @jwks.find { |key| key_matcher.call(key) } + end + end + end +end diff --git a/vendor/bundle/ruby/3.2.0/gems/jwt-3.1.2/lib/jwt/jwk/kid_as_key_digest.rb b/vendor/bundle/ruby/3.2.0/gems/jwt-3.1.2/lib/jwt/jwk/kid_as_key_digest.rb new file mode 100644 index 000000000..08a1d2a70 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/jwt-3.1.2/lib/jwt/jwk/kid_as_key_digest.rb @@ -0,0 +1,16 @@ +# frozen_string_literal: true + +module JWT + module JWK + # @api private + class KidAsKeyDigest + def initialize(jwk) + @jwk = jwk + end + + def generate + @jwk.key_digest + end + end + end +end diff --git a/vendor/bundle/ruby/3.2.0/gems/jwt-3.1.2/lib/jwt/jwk/rsa.rb b/vendor/bundle/ruby/3.2.0/gems/jwt-3.1.2/lib/jwt/jwk/rsa.rb new file mode 100644 index 000000000..c4918602c --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/jwt-3.1.2/lib/jwt/jwk/rsa.rb @@ -0,0 +1,206 @@ +# frozen_string_literal: true + +module JWT + module JWK + # JSON Web Key (JWK) representation of a RSA key + class RSA < KeyBase # rubocop:disable Metrics/ClassLength + BINARY = 2 + KTY = 'RSA' + KTYS = [KTY, OpenSSL::PKey::RSA, JWT::JWK::RSA].freeze + RSA_PUBLIC_KEY_ELEMENTS = %i[kty n e].freeze + RSA_PRIVATE_KEY_ELEMENTS = %i[d p q dp dq qi].freeze + RSA_KEY_ELEMENTS = (RSA_PRIVATE_KEY_ELEMENTS + RSA_PUBLIC_KEY_ELEMENTS).freeze + + RSA_OPT_PARAMS = %i[p q dp dq qi].freeze + RSA_ASN1_SEQUENCE = (%i[n e d] + RSA_OPT_PARAMS).freeze # https://www.rfc-editor.org/rfc/rfc3447#appendix-A.1.2 + + def initialize(key, params = nil, options = {}) + params ||= {} + + # For backwards compatibility when kid was a String + params = { kid: params } if params.is_a?(String) + + key_params = extract_key_params(key) + + params = params.transform_keys(&:to_sym) + check_jwk_params!(key_params, params) + + super(options, key_params.merge(params)) + end + + def keypair + rsa_key + end + + def private? + rsa_key.private? + end + + def public_key + rsa_key.public_key + end + + def signing_key + rsa_key if private? + end + + def verify_key + rsa_key.public_key + end + + def export(options = {}) + exported = parameters.clone + exported.reject! { |k, _| RSA_PRIVATE_KEY_ELEMENTS.include? k } unless private? 
&& options[:include_private] == true + + exported + end + + def members + RSA_PUBLIC_KEY_ELEMENTS.each_with_object({}) { |i, h| h[i] = self[i] } + end + + def key_digest + sequence = OpenSSL::ASN1::Sequence([OpenSSL::ASN1::Integer.new(public_key.n), + OpenSSL::ASN1::Integer.new(public_key.e)]) + OpenSSL::Digest::SHA256.hexdigest(sequence.to_der) + end + + def []=(key, value) + raise ArgumentError, 'cannot overwrite cryptographic key attributes' if RSA_KEY_ELEMENTS.include?(key.to_sym) + + super + end + + private + + def rsa_key + @rsa_key ||= self.class.create_rsa_key(jwk_attributes(*(RSA_KEY_ELEMENTS - [:kty]))) + end + + def extract_key_params(key) + case key + when JWT::JWK::RSA + key.export(include_private: true) + when OpenSSL::PKey::RSA # Accept OpenSSL key as input + @rsa_key = key # Preserve the object to avoid recreation + parse_rsa_key(key) + when Hash + key.transform_keys(&:to_sym) + else + raise ArgumentError, 'key must be of type OpenSSL::PKey::RSA or Hash with key parameters' + end + end + + def check_jwk_params!(key_params, params) + raise ArgumentError, 'cannot overwrite cryptographic key attributes' unless (RSA_KEY_ELEMENTS & params.keys).empty? + raise JWT::JWKError, "Incorrect 'kty' value: #{key_params[:kty]}, expected #{KTY}" unless key_params[:kty] == KTY + raise JWT::JWKError, 'Key format is invalid for RSA' unless key_params[:n] && key_params[:e] + end + + def parse_rsa_key(key) + { + kty: KTY, + n: encode_open_ssl_bn(key.n), + e: encode_open_ssl_bn(key.e), + d: encode_open_ssl_bn(key.d), + p: encode_open_ssl_bn(key.p), + q: encode_open_ssl_bn(key.q), + dp: encode_open_ssl_bn(key.dmp1), + dq: encode_open_ssl_bn(key.dmq1), + qi: encode_open_ssl_bn(key.iqmp) + }.compact + end + + def jwk_attributes(*attributes) + attributes.each_with_object({}) do |attribute, hash| + hash[attribute] = decode_open_ssl_bn(self[attribute]) + end + end + + def encode_open_ssl_bn(key_part) + return unless key_part + + ::JWT::Base64.url_encode(key_part.to_s(BINARY)) + end + + def decode_open_ssl_bn(jwk_data) + self.class.decode_open_ssl_bn(jwk_data) + end + + class << self + def import(jwk_data) + new(jwk_data) + end + + def decode_open_ssl_bn(jwk_data) + return nil unless jwk_data + + OpenSSL::BN.new(::JWT::Base64.url_decode(jwk_data), BINARY) + end + + def create_rsa_key_using_der(rsa_parameters) + validate_rsa_parameters!(rsa_parameters) + + sequence = RSA_ASN1_SEQUENCE.each_with_object([]) do |key, arr| + next if rsa_parameters[key].nil? 
+ + arr << OpenSSL::ASN1::Integer.new(rsa_parameters[key]) + end + + if sequence.size > 2 # Append "two-prime" version for private key + sequence.unshift(OpenSSL::ASN1::Integer.new(0)) + + raise JWT::JWKError, 'Creating a RSA key with a private key requires the CRT parameters to be defined' if sequence.size < RSA_ASN1_SEQUENCE.size + end + + OpenSSL::PKey::RSA.new(OpenSSL::ASN1::Sequence(sequence).to_der) + end + + def create_rsa_key_using_sets(rsa_parameters) + validate_rsa_parameters!(rsa_parameters) + + OpenSSL::PKey::RSA.new.tap do |rsa_key| + rsa_key.set_key(rsa_parameters[:n], rsa_parameters[:e], rsa_parameters[:d]) + rsa_key.set_factors(rsa_parameters[:p], rsa_parameters[:q]) if rsa_parameters[:p] && rsa_parameters[:q] + rsa_key.set_crt_params(rsa_parameters[:dp], rsa_parameters[:dq], rsa_parameters[:qi]) if rsa_parameters[:dp] && rsa_parameters[:dq] && rsa_parameters[:qi] + end + end + + # :nocov: + # Before openssl 2.0, we need to use the accessors to set the key + def create_rsa_key_using_accessors(rsa_parameters) # rubocop:disable Metrics/AbcSize + validate_rsa_parameters!(rsa_parameters) + + OpenSSL::PKey::RSA.new.tap do |rsa_key| + rsa_key.n = rsa_parameters[:n] + rsa_key.e = rsa_parameters[:e] + rsa_key.d = rsa_parameters[:d] if rsa_parameters[:d] + rsa_key.p = rsa_parameters[:p] if rsa_parameters[:p] + rsa_key.q = rsa_parameters[:q] if rsa_parameters[:q] + rsa_key.dmp1 = rsa_parameters[:dp] if rsa_parameters[:dp] + rsa_key.dmq1 = rsa_parameters[:dq] if rsa_parameters[:dq] + rsa_key.iqmp = rsa_parameters[:qi] if rsa_parameters[:qi] + end + end + # :nocov: + + def validate_rsa_parameters!(rsa_parameters) + return unless rsa_parameters.key?(:d) + + parameters = RSA_OPT_PARAMS - rsa_parameters.keys + return if parameters.empty? || parameters.size == RSA_OPT_PARAMS.size + + raise JWT::JWKError, 'When one of p, q, dp, dq or qi is given all the other optimization parameters also needs to be defined' # https://www.rfc-editor.org/rfc/rfc7518.html#section-6.3.2 + end + + if ::JWT.openssl_3? + alias create_rsa_key create_rsa_key_using_der + elsif OpenSSL::PKey::RSA.new.respond_to?(:set_key) + alias create_rsa_key create_rsa_key_using_sets + else + alias create_rsa_key create_rsa_key_using_accessors + end + end + end + end +end diff --git a/vendor/bundle/ruby/3.2.0/gems/jwt-3.1.2/lib/jwt/jwk/set.rb b/vendor/bundle/ruby/3.2.0/gems/jwt-3.1.2/lib/jwt/jwk/set.rb new file mode 100644 index 000000000..6f93e56ee --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/jwt-3.1.2/lib/jwt/jwk/set.rb @@ -0,0 +1,82 @@ +# frozen_string_literal: true + +require 'forwardable' + +module JWT + module JWK + # JSON Web Key Set (JWKS) representation + # https://tools.ietf.org/html/rfc7517 + class Set + include Enumerable + extend Forwardable + + attr_reader :keys + + def initialize(jwks = nil, options = {}) # rubocop:disable Metrics/CyclomaticComplexity + jwks ||= {} + + @keys = case jwks + when JWT::JWK::Set # Simple duplication + jwks.keys + when JWT::JWK::KeyBase # Singleton + [jwks] + when Hash + jwks = jwks.transform_keys(&:to_sym) + [*jwks[:keys]].map { |k| JWT::JWK.new(k, nil, options) } + when Array + jwks.map { |k| JWT::JWK.new(k, nil, options) } + else + raise ArgumentError, 'Can only create new JWKS from Hash, Array and JWK' + end + end + + def export(options = {}) + { keys: @keys.map { |k| k.export(options) } } + end + + def_delegators :@keys, :each, :size, :delete, :dig + + def select!(&block) + return @keys.select! 
unless block + + self if @keys.select!(&block) + end + + def reject!(&block) + return @keys.reject! unless block + + self if @keys.reject!(&block) + end + + def uniq!(&block) + self if @keys.uniq!(&block) + end + + def merge(enum) + @keys += JWT::JWK::Set.new(enum.to_a).keys + self + end + + def union(enum) + dup.merge(enum) + end + + def add(key) + @keys << JWT::JWK.new(key) + self + end + + def ==(other) + other.is_a?(JWT::JWK::Set) && keys.sort == other.keys.sort + end + + alias eql? == + alias filter! select! + alias length size + # For symbolic manipulation + alias | union + alias + union + alias << add + end + end +end diff --git a/vendor/bundle/ruby/3.2.0/gems/jwt-3.1.2/lib/jwt/jwk/thumbprint.rb b/vendor/bundle/ruby/3.2.0/gems/jwt-3.1.2/lib/jwt/jwk/thumbprint.rb new file mode 100644 index 000000000..3583f5780 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/jwt-3.1.2/lib/jwt/jwk/thumbprint.rb @@ -0,0 +1,26 @@ +# frozen_string_literal: true + +module JWT + module JWK + # https://tools.ietf.org/html/rfc7638 + class Thumbprint + attr_reader :jwk + + def initialize(jwk) + @jwk = jwk + end + + def generate + ::Base64.urlsafe_encode64( + Digest::SHA256.digest( + JWT::JSON.generate( + jwk.members.sort.to_h + ) + ), padding: false + ) + end + + alias to_s generate + end + end +end diff --git a/vendor/bundle/ruby/3.2.0/gems/jwt-3.1.2/lib/jwt/token.rb b/vendor/bundle/ruby/3.2.0/gems/jwt-3.1.2/lib/jwt/token.rb new file mode 100644 index 000000000..0c643886f --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/jwt-3.1.2/lib/jwt/token.rb @@ -0,0 +1,131 @@ +# frozen_string_literal: true + +module JWT + # Represents a JWT token + # + # Basic token signed using the HS256 algorithm: + # + # token = JWT::Token.new(payload: {pay: 'load'}) + # token.sign!(algorithm: 'HS256', key: 'secret') + # token.jwt # => eyJhb.... + # + # Custom headers will be combined with generated headers: + # token = JWT::Token.new(payload: {pay: 'load'}, header: {custom: "value"}) + # token.sign!(algorithm: 'HS256', key: 'secret') + # token.header # => {"custom"=>"value", "alg"=>"HS256"} + # + class Token + # Initializes a new Token instance. + # + # @param header [Hash] the header of the JWT token. + # @param payload [Hash] the payload of the JWT token. + def initialize(payload:, header: {}) + @header = header&.transform_keys(&:to_s) + @payload = payload + end + + # Returns the decoded signature of the JWT token. + # + # @return [String] the decoded signature of the JWT token. + def signature + @signature ||= ::JWT::Base64.url_decode(encoded_signature || '') + end + + # Returns the encoded signature of the JWT token. + # + # @return [String] the encoded signature of the JWT token. + def encoded_signature + @encoded_signature ||= ::JWT::Base64.url_encode(signature) + end + + # Returns the decoded header of the JWT token. + # + # @return [Hash] the header of the JWT token. + attr_reader :header + + # Returns the encoded header of the JWT token. + # + # @return [String] the encoded header of the JWT token. + def encoded_header + @encoded_header ||= ::JWT::Base64.url_encode(JWT::JSON.generate(header)) + end + + # Returns the payload of the JWT token. + # + # @return [Hash] the payload of the JWT token. + attr_reader :payload + + # Returns the encoded payload of the JWT token. + # + # @return [String] the encoded payload of the JWT token. + def encoded_payload + @encoded_payload ||= ::JWT::Base64.url_encode(JWT::JSON.generate(payload)) + end + + # Returns the signing input of the JWT token. 
+ # + # @return [String] the signing input of the JWT token. + def signing_input + @signing_input ||= [encoded_header, encoded_payload].join('.') + end + + # Returns the JWT token as a string. + # + # @return [String] the JWT token as a string. + # @raise [JWT::EncodeError] if the token is not signed or other encoding issues + def jwt + @jwt ||= (@signature && [encoded_header, @detached_payload ? '' : encoded_payload, encoded_signature].join('.')) || raise(::JWT::EncodeError, 'Token is not signed') + end + + # Detaches the payload according to https://datatracker.ietf.org/doc/html/rfc7515#appendix-F + # + def detach_payload! + @detached_payload = true + + nil + end + + # Signs the JWT token. + # + # @param key [String, JWT::JWK::KeyBase] the key to use for signing. + # @param algorithm [String, Object] the algorithm to use for signing. + # @return [void] + # @raise [JWT::EncodeError] if the token is already signed or other problems when signing + def sign!(key:, algorithm:) + raise ::JWT::EncodeError, 'Token already signed' if @signature + + JWA.create_signer(algorithm: algorithm, key: key).tap do |signer| + header.merge!(signer.jwa.header) { |_key, old, _new| old } + @signature = signer.sign(data: signing_input) + end + + nil + end + + # Verifies the claims of the token. + # @param options [Array, Hash] the claims to verify. + # @raise [JWT::DecodeError] if the claims are invalid. + def verify_claims!(*options) + Claims::Verifier.verify!(self, *options) + end + + # Returns the errors of the claims of the token. + # @param options [Array, Hash] the claims to verify. + # @return [Array] the errors of the claims. + def claim_errors(*options) + Claims::Verifier.errors(self, *options) + end + + # Returns whether the claims of the token are valid. + # @param options [Array, Hash] the claims to verify. + # @return [Boolean] whether the claims are valid. + def valid_claims?(*options) + claim_errors(*options).empty? + end + + # Returns the JWT token as a string. + # + # @return [String] the JWT token as a string. + alias to_s jwt + end +end diff --git a/vendor/bundle/ruby/3.2.0/gems/jwt-3.1.2/lib/jwt/version.rb b/vendor/bundle/ruby/3.2.0/gems/jwt-3.1.2/lib/jwt/version.rb new file mode 100644 index 000000000..34436db0c --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/jwt-3.1.2/lib/jwt/version.rb @@ -0,0 +1,50 @@ +# frozen_string_literal: true + +# JSON Web Token implementation +# +# Should be up to date with the latest spec: +# https://tools.ietf.org/html/rfc7519 +module JWT + # Returns the gem version of the JWT library. + # + # @return [Gem::Version] the gem version. + def self.gem_version + Gem::Version.new(VERSION::STRING) + end + + # Version constants + module VERSION + MAJOR = 3 + MINOR = 1 + TINY = 2 + PRE = nil + + STRING = [MAJOR, MINOR, TINY, PRE].compact.join('.') + end + + # Checks if the OpenSSL version is 3 or greater. + # + # @return [Boolean] true if OpenSSL version is 3 or greater, false otherwise. + # @api private + def self.openssl_3? + return false if OpenSSL::OPENSSL_VERSION.include?('LibreSSL') + + true if 3 * 0x10000000 <= OpenSSL::OPENSSL_VERSION_NUMBER + end + + # Checks if there is an OpenSSL 3 HMAC empty key regression. + # + # @return [Boolean] true if there is an OpenSSL 3 HMAC empty key regression, false otherwise. + # @api private + def self.openssl_3_hmac_empty_key_regression? + openssl_3? && openssl_version <= ::Gem::Version.new('3.0.0') + end + + # Returns the OpenSSL version. + # + # @return [Gem::Version] the OpenSSL version. 
+ # @api private + def self.openssl_version + @openssl_version ||= ::Gem::Version.new(OpenSSL::VERSION) + end +end diff --git a/vendor/bundle/ruby/3.2.0/gems/jwt-3.1.2/lib/jwt/x5c_key_finder.rb b/vendor/bundle/ruby/3.2.0/gems/jwt-3.1.2/lib/jwt/x5c_key_finder.rb new file mode 100644 index 000000000..265408552 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/jwt-3.1.2/lib/jwt/x5c_key_finder.rb @@ -0,0 +1,52 @@ +# frozen_string_literal: true + +module JWT + # If the x5c header certificate chain can be validated by trusted root + # certificates, and none of the certificates are revoked, returns the public + # key from the first certificate. + # See https://tools.ietf.org/html/rfc7515#section-4.1.6 + class X5cKeyFinder + def initialize(root_certificates, crls = nil) + raise ArgumentError, 'Root certificates must be specified' unless root_certificates + + @store = build_store(root_certificates, crls) + end + + def from(x5c_header_or_certificates) + signing_certificate, *certificate_chain = parse_certificates(x5c_header_or_certificates) + store_context = OpenSSL::X509::StoreContext.new(@store, signing_certificate, certificate_chain) + + if store_context.verify + signing_certificate.public_key + else + error = "Certificate verification failed: #{store_context.error_string}." + if (current_cert = store_context.current_cert) + error = "#{error} Certificate subject: #{current_cert.subject}." + end + + raise JWT::VerificationError, error + end + end + + private + + def build_store(root_certificates, crls) + store = OpenSSL::X509::Store.new + store.purpose = OpenSSL::X509::PURPOSE_ANY + store.flags = OpenSSL::X509::V_FLAG_CRL_CHECK | OpenSSL::X509::V_FLAG_CRL_CHECK_ALL + root_certificates.each { |certificate| store.add_cert(certificate) } + crls&.each { |crl| store.add_crl(crl) } + store + end + + def parse_certificates(x5c_header_or_certificates) + if x5c_header_or_certificates.all? { |obj| obj.is_a?(OpenSSL::X509::Certificate) } + x5c_header_or_certificates + else + x5c_header_or_certificates.map do |encoded| + OpenSSL::X509::Certificate.new(::JWT::Base64.url_decode(encoded)) + end + end + end + end +end diff --git a/vendor/bundle/ruby/3.2.0/gems/jwt-3.1.2/ruby-jwt.gemspec b/vendor/bundle/ruby/3.2.0/gems/jwt-3.1.2/ruby-jwt.gemspec new file mode 100644 index 000000000..1c469c462 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/jwt-3.1.2/ruby-jwt.gemspec @@ -0,0 +1,44 @@ +# frozen_string_literal: true + +lib = File.expand_path('lib', __dir__) +$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib) +require 'jwt/version' + +Gem::Specification.new do |spec| + spec.name = 'jwt' + spec.version = JWT.gem_version + spec.authors = [ + 'Tim Rudat' + ] + spec.email = 'timrudat@gmail.com' + spec.summary = 'JSON Web Token implementation in Ruby' + spec.description = 'A pure ruby implementation of the RFC 7519 OAuth JSON Web Token (JWT) standard.' + spec.homepage = 'https://github.com/jwt/ruby-jwt' + spec.license = 'MIT' + spec.required_ruby_version = '>= 2.5' + spec.metadata = { + 'bug_tracker_uri' => 'https://github.com/jwt/ruby-jwt/issues', + 'changelog_uri' => "https://github.com/jwt/ruby-jwt/blob/v#{JWT.gem_version}/CHANGELOG.md", + 'rubygems_mfa_required' => 'true' + } + + spec.files = `git ls-files -z`.split("\x0").reject do |f| + f.match(%r{^(spec|gemfiles|coverage|bin)/}) || # Irrelevant folders + f.match(/^\.+/) || # Files and folders starting with . 
+ f.match(/^(Appraisals|Gemfile|Rakefile)$/) # Irrelevant files + end + + spec.executables = [] + spec.require_paths = %w[lib] + + spec.add_dependency 'base64' + + spec.add_development_dependency 'appraisal' + spec.add_development_dependency 'bundler' + spec.add_development_dependency 'irb' + spec.add_development_dependency 'logger' + spec.add_development_dependency 'rake' + spec.add_development_dependency 'rspec' + spec.add_development_dependency 'rubocop' + spec.add_development_dependency 'simplecov' +end diff --git a/vendor/bundle/ruby/3.2.0/gems/logger-1.4.4/lib/logger.rb b/vendor/bundle/ruby/3.2.0/gems/logger-1.4.4/lib/logger.rb new file mode 100644 index 000000000..4205380a6 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/logger-1.4.4/lib/logger.rb @@ -0,0 +1,588 @@ +# frozen_string_literal: true +# logger.rb - simple logging utility +# Copyright (C) 2000-2003, 2005, 2008, 2011 NAKAMURA, Hiroshi . +# +# Documentation:: NAKAMURA, Hiroshi and Gavin Sinclair +# License:: +# You can redistribute it and/or modify it under the same terms of Ruby's +# license; either the dual license version in 2003, or any later version. +# Revision:: $Id$ +# +# A simple system for logging messages. See Logger for more documentation. + +require 'monitor' + +require_relative 'logger/version' +require_relative 'logger/formatter' +require_relative 'logger/log_device' +require_relative 'logger/severity' +require_relative 'logger/errors' + +# == Description +# +# The Logger class provides a simple but sophisticated logging utility that +# you can use to output messages. +# +# The messages have associated levels, such as +INFO+ or +ERROR+ that indicate +# their importance. You can then give the Logger a level, and only messages +# at that level or higher will be printed. +# +# The levels are: +# +# +UNKNOWN+:: An unknown message that should always be logged. +# +FATAL+:: An unhandleable error that results in a program crash. +# +ERROR+:: A handleable error condition. +# +WARN+:: A warning. +# +INFO+:: Generic (useful) information about system operation. +# +DEBUG+:: Low-level information for developers. +# +# For instance, in a production system, you may have your Logger set to +# +INFO+ or even +WARN+. +# When you are developing the system, however, you probably +# want to know about the program's internal state, and would set the Logger to +# +DEBUG+. +# +# *Note*: Logger does not escape or sanitize any messages passed to it. +# Developers should be aware of when potentially malicious data (user-input) +# is passed to Logger, and manually escape the untrusted data: +# +# logger.info("User-input: #{input.dump}") +# logger.info("User-input: %p" % input) +# +# You can use #formatter= for escaping all data. 
+# +# original_formatter = Logger::Formatter.new +# logger.formatter = proc { |severity, datetime, progname, msg| +# original_formatter.call(severity, datetime, progname, msg.dump) +# } +# logger.info(input) +# +# === Example +# +# This creates a Logger that outputs to the standard output stream, with a +# level of +WARN+: +# +# require 'logger' +# +# logger = Logger.new(STDOUT) +# logger.level = Logger::WARN +# +# logger.debug("Created logger") +# logger.info("Program started") +# logger.warn("Nothing to do!") +# +# path = "a_non_existent_file" +# +# begin +# File.foreach(path) do |line| +# unless line =~ /^(\w+) = (.*)$/ +# logger.error("Line in wrong format: #{line.chomp}") +# end +# end +# rescue => err +# logger.fatal("Caught exception; exiting") +# logger.fatal(err) +# end +# +# Because the Logger's level is set to +WARN+, only the warning, error, and +# fatal messages are recorded. The debug and info messages are silently +# discarded. +# +# === Features +# +# There are several interesting features that Logger provides, like +# auto-rolling of log files, setting the format of log messages, and +# specifying a program name in conjunction with the message. The next section +# shows you how to achieve these things. +# +# +# == HOWTOs +# +# === How to create a logger +# +# The options below give you various choices, in more or less increasing +# complexity. +# +# 1. Create a logger which logs messages to STDERR/STDOUT. +# +# logger = Logger.new(STDERR) +# logger = Logger.new(STDOUT) +# +# 2. Create a logger for the file which has the specified name. +# +# logger = Logger.new('logfile.log') +# +# 3. Create a logger for the specified file. +# +# file = File.open('foo.log', File::WRONLY | File::APPEND) +# # To create new logfile, add File::CREAT like: +# # file = File.open('foo.log', File::WRONLY | File::APPEND | File::CREAT) +# logger = Logger.new(file) +# +# 4. Create a logger which ages the logfile once it reaches a certain size. +# Leave 10 "old" log files where each file is about 1,024,000 bytes. +# +# logger = Logger.new('foo.log', 10, 1024000) +# +# 5. Create a logger which ages the logfile daily/weekly/monthly. +# +# logger = Logger.new('foo.log', 'daily') +# logger = Logger.new('foo.log', 'weekly') +# logger = Logger.new('foo.log', 'monthly') +# +# === How to log a message +# +# Notice the different methods (+fatal+, +error+, +info+) being used to log +# messages of various levels? Other methods in this family are +warn+ and +# +debug+. +add+ is used below to log a message of an arbitrary (perhaps +# dynamic) level. +# +# 1. Message in a block. +# +# logger.fatal { "Argument 'foo' not given." } +# +# 2. Message as a string. +# +# logger.error "Argument #{@foo} mismatch." +# +# 3. With progname. +# +# logger.info('initialize') { "Initializing..." } +# +# 4. With severity. +# +# logger.add(Logger::FATAL) { 'Fatal error!' } +# +# The block form allows you to create potentially complex log messages, +# but to delay their evaluation until and unless the message is +# logged. For example, if we have the following: +# +# logger.debug { "This is a " + potentially + " expensive operation" } +# +# If the logger's level is +INFO+ or higher, no debug messages will be logged, +# and the entire block will not even be evaluated. Compare to this: +# +# logger.debug("This is a " + potentially + " expensive operation") +# +# Here, the string concatenation is done every time, even if the log +# level is not set to show the debug message. 
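+#
+# A rough sketch of the equivalent explicit guard (reusing the same
+# +potentially+ placeholder from the examples above); the block form is
+# simply the more concise way to get this lazy evaluation, since #debug?
+# is checked before the message string is ever built:
+#
+#   logger.debug("This is a " + potentially + " expensive operation") if logger.debug?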
+# +# === How to close a logger +# +# logger.close +# +# === Setting severity threshold +# +# 1. Original interface. +# +# logger.sev_threshold = Logger::WARN +# +# 2. Log4r (somewhat) compatible interface. +# +# logger.level = Logger::INFO +# +# # DEBUG < INFO < WARN < ERROR < FATAL < UNKNOWN +# +# 3. Symbol or String (case insensitive) +# +# logger.level = :info +# logger.level = 'INFO' +# +# # :debug < :info < :warn < :error < :fatal < :unknown +# +# 4. Constructor +# +# Logger.new(logdev, level: Logger::INFO) +# Logger.new(logdev, level: :info) +# Logger.new(logdev, level: 'INFO') +# +# == Format +# +# Log messages are rendered in the output stream in a certain format by +# default. The default format and a sample are shown below: +# +# Log format: +# SeverityID, [DateTime #pid] SeverityLabel -- ProgName: message +# +# Log sample: +# I, [1999-03-03T02:34:24.895701 #19074] INFO -- Main: info. +# +# You may change the date and time format via #datetime_format=. +# +# logger.datetime_format = '%Y-%m-%d %H:%M:%S' +# # e.g. "2004-01-03 00:54:26" +# +# or via the constructor. +# +# Logger.new(logdev, datetime_format: '%Y-%m-%d %H:%M:%S') +# +# Or, you may change the overall format via the #formatter= method. +# +# logger.formatter = proc do |severity, datetime, progname, msg| +# "#{datetime}: #{msg}\n" +# end +# # e.g. "2005-09-22 08:51:08 +0900: hello world" +# +# or via the constructor. +# +# Logger.new(logdev, formatter: proc {|severity, datetime, progname, msg| +# "#{datetime}: #{msg}\n" +# }) +# +class Logger + _, name, rev = %w$Id$ + if name + name = name.chomp(",v") + else + name = File.basename(__FILE__) + end + rev ||= "v#{VERSION}" + ProgName = "#{name}/#{rev}" + + include Severity + + # Logging severity threshold (e.g. Logger::INFO). + attr_reader :level + + # Set logging severity threshold. + # + # +severity+:: The Severity of the log message. + def level=(severity) + if severity.is_a?(Integer) + @level = severity + else + case severity.to_s.downcase + when 'debug' + @level = DEBUG + when 'info' + @level = INFO + when 'warn' + @level = WARN + when 'error' + @level = ERROR + when 'fatal' + @level = FATAL + when 'unknown' + @level = UNKNOWN + else + raise ArgumentError, "invalid log level: #{severity}" + end + end + end + + # Program name to include in log messages. + attr_accessor :progname + + # Set date-time format. + # + # +datetime_format+:: A string suitable for passing to +strftime+. + def datetime_format=(datetime_format) + @default_formatter.datetime_format = datetime_format + end + + # Returns the date format being used. See #datetime_format= + def datetime_format + @default_formatter.datetime_format + end + + # Logging formatter, as a +Proc+ that will take four arguments and + # return the formatted message. The arguments are: + # + # +severity+:: The Severity of the log message. + # +time+:: A Time instance representing when the message was logged. + # +progname+:: The #progname configured, or passed to the logger method. + # +msg+:: The _Object_ the user passed to the log message; not necessarily a + # String. + # + # The block should return an Object that can be written to the logging + # device via +write+. The default formatter is used when no formatter is + # set. + attr_accessor :formatter + + alias sev_threshold level + alias sev_threshold= level= + + # Returns +true+ if and only if the current severity level allows for the printing of + # +DEBUG+ messages. + def debug?; level <= DEBUG; end + + # Sets the severity to DEBUG. 
+ def debug!; self.level = DEBUG; end + + # Returns +true+ if and only if the current severity level allows for the printing of + # +INFO+ messages. + def info?; level <= INFO; end + + # Sets the severity to INFO. + def info!; self.level = INFO; end + + # Returns +true+ if and only if the current severity level allows for the printing of + # +WARN+ messages. + def warn?; level <= WARN; end + + # Sets the severity to WARN. + def warn!; self.level = WARN; end + + # Returns +true+ if and only if the current severity level allows for the printing of + # +ERROR+ messages. + def error?; level <= ERROR; end + + # Sets the severity to ERROR. + def error!; self.level = ERROR; end + + # Returns +true+ if and only if the current severity level allows for the printing of + # +FATAL+ messages. + def fatal?; level <= FATAL; end + + # Sets the severity to FATAL. + def fatal!; self.level = FATAL; end + + # + # :call-seq: + # Logger.new(logdev, shift_age = 0, shift_size = 1048576) + # Logger.new(logdev, shift_age = 'weekly') + # Logger.new(logdev, level: :info) + # Logger.new(logdev, progname: 'progname') + # Logger.new(logdev, formatter: formatter) + # Logger.new(logdev, datetime_format: '%Y-%m-%d %H:%M:%S') + # + # === Args + # + # +logdev+:: + # The log device. This is a filename (String), IO object (typically + # +STDOUT+, +STDERR+, or an open file), +nil+ (it writes nothing) or + # +File::NULL+ (same as +nil+). + # +shift_age+:: + # Number of old log files to keep, *or* frequency of rotation (+daily+, + # +weekly+ or +monthly+). Default value is 0, which disables log file + # rotation. + # +shift_size+:: + # Maximum logfile size in bytes (only applies when +shift_age+ is a positive + # Integer). Defaults to +1048576+ (1MB). + # +level+:: + # Logging severity threshold. Default values is Logger::DEBUG. + # +progname+:: + # Program name to include in log messages. Default value is nil. + # +formatter+:: + # Logging formatter. Default values is an instance of Logger::Formatter. + # +datetime_format+:: + # Date and time format. Default value is '%Y-%m-%d %H:%M:%S'. + # +binmode+:: + # Use binary mode on the log device. Default value is false. + # +shift_period_suffix+:: + # The log file suffix format for +daily+, +weekly+ or +monthly+ rotation. + # Default is '%Y%m%d'. + # + # === Description + # + # Create an instance. + # + def initialize(logdev, shift_age = 0, shift_size = 1048576, level: DEBUG, + progname: nil, formatter: nil, datetime_format: nil, + binmode: false, shift_period_suffix: '%Y%m%d') + self.level = level + self.progname = progname + @default_formatter = Formatter.new + self.datetime_format = datetime_format + self.formatter = formatter + @logdev = nil + if logdev && logdev != File::NULL + @logdev = LogDevice.new(logdev, shift_age: shift_age, + shift_size: shift_size, + shift_period_suffix: shift_period_suffix, + binmode: binmode) + end + end + + # + # :call-seq: + # Logger#reopen + # Logger#reopen(logdev) + # + # === Args + # + # +logdev+:: + # The log device. This is a filename (String) or IO object (typically + # +STDOUT+, +STDERR+, or an open file). reopen the same filename if + # it is +nil+, do nothing for IO. Default is +nil+. + # + # === Description + # + # Reopen a log device. + # + def reopen(logdev = nil) + @logdev&.reopen(logdev) + self + end + + # + # :call-seq: + # Logger#add(severity, message = nil, progname = nil) { ... } + # + # === Args + # + # +severity+:: + # Severity. 
Constants are defined in Logger namespace: +DEBUG+, +INFO+, + # +WARN+, +ERROR+, +FATAL+, or +UNKNOWN+. + # +message+:: + # The log message. A String or Exception. + # +progname+:: + # Program name string. Can be omitted. Treated as a message if no + # +message+ and +block+ are given. + # +block+:: + # Can be omitted. Called to get a message string if +message+ is nil. + # + # === Return + # + # When the given severity is not high enough (for this particular logger), + # log no message, and return +true+. + # + # === Description + # + # Log a message if the given severity is high enough. This is the generic + # logging method. Users will be more inclined to use #debug, #info, #warn, + # #error, and #fatal. + # + # Message format: +message+ can be any object, but it has to be + # converted to a String in order to log it. Generally, +inspect+ is used + # if the given object is not a String. + # A special case is an +Exception+ object, which will be printed in detail, + # including message, class, and backtrace. See #msg2str for the + # implementation if required. + # + # === Bugs + # + # * Logfile is not locked. + # * Append open does not need to lock file. + # * If the OS supports multi I/O, records possibly may be mixed. + # + def add(severity, message = nil, progname = nil) + severity ||= UNKNOWN + if @logdev.nil? or severity < level + return true + end + if progname.nil? + progname = @progname + end + if message.nil? + if block_given? + message = yield + else + message = progname + progname = @progname + end + end + @logdev.write( + format_message(format_severity(severity), Time.now, progname, message)) + true + end + alias log add + + # + # Dump given message to the log device without any formatting. If no log + # device exists, return +nil+. + # + def <<(msg) + @logdev&.write(msg) + end + + # + # Log a +DEBUG+ message. + # + # See #info for more information. + # + def debug(progname = nil, &block) + add(DEBUG, nil, progname, &block) + end + + # + # :call-seq: + # info(message) + # info(progname, &block) + # + # Log an +INFO+ message. + # + # +message+:: The message to log; does not need to be a String. + # +progname+:: In the block form, this is the #progname to use in the + # log message. The default can be set with #progname=. + # +block+:: Evaluates to the message to log. This is not evaluated unless + # the logger's level is sufficient to log the message. This + # allows you to create potentially expensive logging messages that + # are only called when the logger is configured to show them. + # + # === Examples + # + # logger.info("MainApp") { "Received connection from #{ip}" } + # # ... + # logger.info "Waiting for input from user" + # # ... + # logger.info { "User typed #{input}" } + # + # You'll probably stick to the second form above, unless you want to provide a + # program name (which you can do with #progname= as well). + # + # === Return + # + # See #add. + # + def info(progname = nil, &block) + add(INFO, nil, progname, &block) + end + + # + # Log a +WARN+ message. + # + # See #info for more information. + # + def warn(progname = nil, &block) + add(WARN, nil, progname, &block) + end + + # + # Log an +ERROR+ message. + # + # See #info for more information. + # + def error(progname = nil, &block) + add(ERROR, nil, progname, &block) + end + + # + # Log a +FATAL+ message. + # + # See #info for more information. + # + def fatal(progname = nil, &block) + add(FATAL, nil, progname, &block) + end + + # + # Log an +UNKNOWN+ message. 
This will be printed no matter what the logger's + # level is. + # + # See #info for more information. + # + def unknown(progname = nil, &block) + add(UNKNOWN, nil, progname, &block) + end + + # + # Close the logging device. + # + def close + @logdev&.close + end + +private + + # Severity label for logging (max 5 chars). + SEV_LABEL = %w(DEBUG INFO WARN ERROR FATAL ANY).freeze + + def format_severity(severity) + SEV_LABEL[severity] || 'ANY' + end + + def format_message(severity, datetime, progname, msg) + (@formatter || @default_formatter).call(severity, datetime, progname, msg) + end +end diff --git a/vendor/bundle/ruby/3.2.0/gems/logger-1.4.4/lib/logger/errors.rb b/vendor/bundle/ruby/3.2.0/gems/logger-1.4.4/lib/logger/errors.rb new file mode 100644 index 000000000..e8925e14a --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/logger-1.4.4/lib/logger/errors.rb @@ -0,0 +1,9 @@ +# frozen_string_literal: true + +# not used after 1.2.7. just for compat. +class Logger + class Error < RuntimeError # :nodoc: + end + class ShiftingError < Error # :nodoc: + end +end diff --git a/vendor/bundle/ruby/3.2.0/gems/logger-1.4.4/lib/logger/formatter.rb b/vendor/bundle/ruby/3.2.0/gems/logger-1.4.4/lib/logger/formatter.rb new file mode 100644 index 000000000..6a135b6fa --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/logger-1.4.4/lib/logger/formatter.rb @@ -0,0 +1,36 @@ +# frozen_string_literal: true + +class Logger + # Default formatter for log messages. + class Formatter + Format = "%s, [%s#%d] %5s -- %s: %s\n" + + attr_accessor :datetime_format + + def initialize + @datetime_format = nil + end + + def call(severity, time, progname, msg) + Format % [severity[0..0], format_datetime(time), Process.pid, severity, progname, + msg2str(msg)] + end + + private + + def format_datetime(time) + time.strftime(@datetime_format || "%Y-%m-%dT%H:%M:%S.%6N ") + end + + def msg2str(msg) + case msg + when ::String + msg + when ::Exception + "#{ msg.message } (#{ msg.class })\n#{ msg.backtrace.join("\n") if msg.backtrace }" + else + msg.inspect + end + end + end +end diff --git a/vendor/bundle/ruby/3.2.0/gems/logger-1.4.4/lib/logger/log_device.rb b/vendor/bundle/ruby/3.2.0/gems/logger-1.4.4/lib/logger/log_device.rb new file mode 100644 index 000000000..96d77b7b6 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/logger-1.4.4/lib/logger/log_device.rb @@ -0,0 +1,205 @@ +# frozen_string_literal: true + +require_relative 'period' + +class Logger + # Device used for logging messages. + class LogDevice + include Period + + attr_reader :dev + attr_reader :filename + include MonitorMixin + + def initialize(log = nil, shift_age: nil, shift_size: nil, shift_period_suffix: nil, binmode: false) + @dev = @filename = @shift_age = @shift_size = @shift_period_suffix = nil + @binmode = binmode + mon_initialize + set_dev(log) + if @filename + @shift_age = shift_age || 7 + @shift_size = shift_size || 1048576 + @shift_period_suffix = shift_period_suffix || '%Y%m%d' + + unless @shift_age.is_a?(Integer) + base_time = @dev.respond_to?(:stat) ? @dev.stat.mtime : Time.now + @next_rotate_time = next_rotate_time(base_time, @shift_age) + end + end + end + + def write(message) + begin + synchronize do + if @shift_age and @dev.respond_to?(:stat) + begin + check_shift_log + rescue + warn("log shifting failed. #{$!}") + end + end + begin + @dev.write(message) + rescue + warn("log writing failed. #{$!}") + end + end + rescue Exception => ignored + warn("log writing failed. 
#{ignored}") + end + end + + def close + begin + synchronize do + @dev.close rescue nil + end + rescue Exception + @dev.close rescue nil + end + end + + def reopen(log = nil) + # reopen the same filename if no argument, do nothing for IO + log ||= @filename if @filename + if log + synchronize do + if @filename and @dev + @dev.close rescue nil # close only file opened by Logger + @filename = nil + end + set_dev(log) + end + end + self + end + + private + + def set_dev(log) + if log.respond_to?(:write) and log.respond_to?(:close) + @dev = log + if log.respond_to?(:path) + @filename = log.path + end + else + @dev = open_logfile(log) + @dev.sync = true + @dev.binmode if @binmode + @filename = log + end + end + + def open_logfile(filename) + begin + File.open(filename, (File::WRONLY | File::APPEND)) + rescue Errno::ENOENT + create_logfile(filename) + end + end + + def create_logfile(filename) + begin + logdev = File.open(filename, (File::WRONLY | File::APPEND | File::CREAT | File::EXCL)) + logdev.flock(File::LOCK_EX) + logdev.sync = true + logdev.binmode if @binmode + add_log_header(logdev) + logdev.flock(File::LOCK_UN) + rescue Errno::EEXIST + # file is created by another process + logdev = open_logfile(filename) + logdev.sync = true + end + logdev + end + + def add_log_header(file) + file.write( + "# Logfile created on %s by %s\n" % [Time.now.to_s, Logger::ProgName] + ) if file.size == 0 + end + + def check_shift_log + if @shift_age.is_a?(Integer) + # Note: always returns false if '0'. + if @filename && (@shift_age > 0) && (@dev.stat.size > @shift_size) + lock_shift_log { shift_log_age } + end + else + now = Time.now + if now >= @next_rotate_time + @next_rotate_time = next_rotate_time(now, @shift_age) + lock_shift_log { shift_log_period(previous_period_end(now, @shift_age)) } + end + end + end + + if /mswin|mingw|cygwin/ =~ RUBY_PLATFORM + def lock_shift_log + yield + end + else + def lock_shift_log + retry_limit = 8 + retry_sleep = 0.1 + begin + File.open(@filename, File::WRONLY | File::APPEND) do |lock| + lock.flock(File::LOCK_EX) # inter-process locking. will be unlocked at closing file + if File.identical?(@filename, lock) and File.identical?(lock, @dev) + yield # log shifting + else + # log shifted by another process (i-node before locking and i-node after locking are different) + @dev.close rescue nil + @dev = open_logfile(@filename) + @dev.sync = true + end + end + rescue Errno::ENOENT + # @filename file would not exist right after #rename and before #create_logfile + if retry_limit <= 0 + warn("log rotation inter-process lock failed. #{$!}") + else + sleep retry_sleep + retry_limit -= 1 + retry_sleep *= 2 + retry + end + end + rescue + warn("log rotation inter-process lock failed. #{$!}") + end + end + + def shift_log_age + (@shift_age-3).downto(0) do |i| + if FileTest.exist?("#{@filename}.#{i}") + File.rename("#{@filename}.#{i}", "#{@filename}.#{i+1}") + end + end + @dev.close rescue nil + File.rename("#{@filename}", "#{@filename}.0") + @dev = create_logfile(@filename) + return true + end + + def shift_log_period(period_end) + suffix = period_end.strftime(@shift_period_suffix) + age_file = "#{@filename}.#{suffix}" + if FileTest.exist?(age_file) + # try to avoid filename crash caused by Timestamp change. 
+ idx = 0 + # .99 can be overridden; avoid too much file search with 'loop do' + while idx < 100 + idx += 1 + age_file = "#{@filename}.#{suffix}.#{idx}" + break unless FileTest.exist?(age_file) + end + end + @dev.close rescue nil + File.rename("#{@filename}", age_file) + @dev = create_logfile(@filename) + return true + end + end +end diff --git a/vendor/bundle/ruby/3.2.0/gems/logger-1.4.4/lib/logger/period.rb b/vendor/bundle/ruby/3.2.0/gems/logger-1.4.4/lib/logger/period.rb new file mode 100644 index 000000000..0a291dbbb --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/logger-1.4.4/lib/logger/period.rb @@ -0,0 +1,47 @@ +# frozen_string_literal: true + +class Logger + module Period + module_function + + SiD = 24 * 60 * 60 + + def next_rotate_time(now, shift_age) + case shift_age + when 'daily' + t = Time.mktime(now.year, now.month, now.mday) + SiD + when 'weekly' + t = Time.mktime(now.year, now.month, now.mday) + SiD * (7 - now.wday) + when 'monthly' + t = Time.mktime(now.year, now.month, 1) + SiD * 32 + return Time.mktime(t.year, t.month, 1) + when 'now', 'everytime' + return now + else + raise ArgumentError, "invalid :shift_age #{shift_age.inspect}, should be daily, weekly, monthly, or everytime" + end + if t.hour.nonzero? or t.min.nonzero? or t.sec.nonzero? + hour = t.hour + t = Time.mktime(t.year, t.month, t.mday) + t += SiD if hour > 12 + end + t + end + + def previous_period_end(now, shift_age) + case shift_age + when 'daily' + t = Time.mktime(now.year, now.month, now.mday) - SiD / 2 + when 'weekly' + t = Time.mktime(now.year, now.month, now.mday) - (SiD * now.wday + SiD / 2) + when 'monthly' + t = Time.mktime(now.year, now.month, 1) - SiD / 2 + when 'now', 'everytime' + return now + else + raise ArgumentError, "invalid :shift_age #{shift_age.inspect}, should be daily, weekly, monthly, or everytime" + end + Time.mktime(t.year, t.month, t.mday, 23, 59, 59) + end + end +end diff --git a/vendor/bundle/ruby/3.2.0/gems/logger-1.4.4/lib/logger/severity.rb b/vendor/bundle/ruby/3.2.0/gems/logger-1.4.4/lib/logger/severity.rb new file mode 100644 index 000000000..b38afb7d2 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/logger-1.4.4/lib/logger/severity.rb @@ -0,0 +1,19 @@ +# frozen_string_literal: true + +class Logger + # Logging severity. + module Severity + # Low-level information, mostly for developers. + DEBUG = 0 + # Generic (useful) information about system operation. + INFO = 1 + # A warning. + WARN = 2 + # A handleable error condition. + ERROR = 3 + # An unhandleable error that results in a program crash. + FATAL = 4 + # An unknown message that should always be logged. 
+ UNKNOWN = 5 + end +end diff --git a/vendor/bundle/ruby/3.2.0/gems/logger-1.4.4/lib/logger/version.rb b/vendor/bundle/ruby/3.2.0/gems/logger-1.4.4/lib/logger/version.rb new file mode 100644 index 000000000..b2e6909c3 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/logger-1.4.4/lib/logger/version.rb @@ -0,0 +1,5 @@ +# frozen_string_literal: true + +class Logger + VERSION = "1.4.4" +end diff --git a/vendor/bundle/ruby/3.2.0/gems/logger-1.4.4/logger.gemspec b/vendor/bundle/ruby/3.2.0/gems/logger-1.4.4/logger.gemspec new file mode 100644 index 000000000..ccd4e70db --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/logger-1.4.4/logger.gemspec @@ -0,0 +1,27 @@ +begin + require_relative "lib/logger/version" +rescue LoadError # Fallback to load version file in ruby core repository + require_relative "version" +end + +Gem::Specification.new do |spec| + spec.name = "logger" + spec.version = Logger::VERSION + spec.authors = ["Naotoshi Seo", "SHIBATA Hiroshi"] + spec.email = ["sonots@gmail.com", "hsbt@ruby-lang.org"] + + spec.summary = %q{Provides a simple logging utility for outputting messages.} + spec.description = %q{Provides a simple logging utility for outputting messages.} + spec.homepage = "https://github.com/ruby/logger" + spec.licenses = ["Ruby", "BSD-2-Clause"] + + spec.files = Dir.glob("lib/**/*.rb") + ["logger.gemspec"] + spec.require_paths = ["lib"] + + spec.required_ruby_version = ">= 2.3.0" + + spec.add_development_dependency "bundler", ">= 0" + spec.add_development_dependency "rake", ">= 12.3.3" + spec.add_development_dependency "test-unit" + spec.add_development_dependency "rdoc" +end diff --git a/vendor/bundle/ruby/3.2.0/gems/net-http-0.6.0/BSDL b/vendor/bundle/ruby/3.2.0/gems/net-http-0.6.0/BSDL new file mode 100644 index 000000000..66d93598a --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/net-http-0.6.0/BSDL @@ -0,0 +1,22 @@ +Copyright (C) 1993-2013 Yukihiro Matsumoto. All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions +are met: +1. Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. +2. Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + +THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE +ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS +OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) +HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY +OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF +SUCH DAMAGE. diff --git a/vendor/bundle/ruby/3.2.0/gems/net-http-0.6.0/COPYING b/vendor/bundle/ruby/3.2.0/gems/net-http-0.6.0/COPYING new file mode 100644 index 000000000..48e5a96de --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/net-http-0.6.0/COPYING @@ -0,0 +1,56 @@ +Ruby is copyrighted free software by Yukihiro Matsumoto . 
+You can redistribute it and/or modify it under either the terms of the +2-clause BSDL (see the file BSDL), or the conditions below: + +1. You may make and give away verbatim copies of the source form of the + software without restriction, provided that you duplicate all of the + original copyright notices and associated disclaimers. + +2. You may modify your copy of the software in any way, provided that + you do at least ONE of the following: + + a. place your modifications in the Public Domain or otherwise + make them Freely Available, such as by posting said + modifications to Usenet or an equivalent medium, or by allowing + the author to include your modifications in the software. + + b. use the modified software only within your corporation or + organization. + + c. give non-standard binaries non-standard names, with + instructions on where to get the original software distribution. + + d. make other distribution arrangements with the author. + +3. You may distribute the software in object code or binary form, + provided that you do at least ONE of the following: + + a. distribute the binaries and library files of the software, + together with instructions (in the manual page or equivalent) + on where to get the original distribution. + + b. accompany the distribution with the machine-readable source of + the software. + + c. give non-standard binaries non-standard names, with + instructions on where to get the original software distribution. + + d. make other distribution arrangements with the author. + +4. You may modify and include the part of the software into any other + software (possibly commercial). But some files in the distribution + are not written by the author, so that they are not under these terms. + + For the list of those files and their copying conditions, see the + file LEGAL. + +5. The scripts and library files supplied as input to or produced as + output from the software do not automatically fall under the + copyright of the software, but belong to whomever generated them, + and may be sold commercially, and may be aggregated with this + software. + +6. THIS SOFTWARE IS PROVIDED "AS IS" AND WITHOUT ANY EXPRESS OR + IMPLIED WARRANTIES, INCLUDING, WITHOUT LIMITATION, THE IMPLIED + WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR + PURPOSE. diff --git a/vendor/bundle/ruby/3.2.0/gems/net-http-0.6.0/Gemfile b/vendor/bundle/ruby/3.2.0/gems/net-http-0.6.0/Gemfile new file mode 100644 index 000000000..78b50ffc1 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/net-http-0.6.0/Gemfile @@ -0,0 +1,8 @@ +source "https://rubygems.org" + +gemspec + +gem "rake" +gem "test-unit" +gem "test-unit-ruby-core" +gem "webrick" diff --git a/vendor/bundle/ruby/3.2.0/gems/net-http-0.6.0/README.md b/vendor/bundle/ruby/3.2.0/gems/net-http-0.6.0/README.md new file mode 100644 index 000000000..fcc1f395f --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/net-http-0.6.0/README.md @@ -0,0 +1,93 @@ +# Net::HTTP + +Net::HTTP provides a rich library which can be used to build HTTP +user-agents. For more details about HTTP see +[RFC9110 HTTP Semantics](https://www.ietf.org/rfc/rfc9110.html) and +[RFC9112 HTTP/1.1](https://www.ietf.org/rfc/rfc9112.html). + +Net::HTTP is designed to work closely with URI. URI::HTTP#host, +URI::HTTP#port and URI::HTTP#request_uri are designed to work with +Net::HTTP. + +If you are only performing a few GET requests you should try OpenURI. 
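+
+A minimal sketch of both approaches for a one-off fetch, using
+`example.com` purely as a placeholder host:
+
+```ruby
+# Simplest: OpenURI, convenient for a handful of GET requests.
+require 'open-uri'
+body = URI.open('http://example.com/index.html').read
+
+# Net::HTTP, driven by a URI object (see the examples below).
+require 'net/http'
+body = Net::HTTP.get(URI('http://example.com/index.html'))
+```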
+ +## Installation + +Add this line to your application's Gemfile: + +```ruby +gem 'net-http' +``` + +And then execute: + + $ bundle install + +Or install it yourself as: + + $ gem install net-http + +## Usage + +All examples assume you have loaded Net::HTTP with: + +```ruby +require 'net/http' +``` + +This will also require 'uri' so you don't need to require it separately. + +The Net::HTTP methods in the following section do not persist +connections. They are not recommended if you are performing many HTTP +requests. + +### GET + +```ruby +Net::HTTP.get('example.com', '/index.html') # => String +``` + +### GET by URI + +```ruby +uri = URI('http://example.com/index.html?count=10') +Net::HTTP.get(uri) # => String +``` + +### GET with Dynamic Parameters + +```ruby +uri = URI('http://example.com/index.html') +params = { :limit => 10, :page => 3 } +uri.query = URI.encode_www_form(params) + +res = Net::HTTP.get_response(uri) +puts res.body if res.is_a?(Net::HTTPSuccess) +``` + +### POST + +```ruby +uri = URI('http://www.example.com/search.cgi') +res = Net::HTTP.post_form(uri, 'q' => 'ruby', 'max' => '50') +puts res.body +``` + +### POST with Multiple Values + +```ruby +uri = URI('http://www.example.com/search.cgi') +res = Net::HTTP.post_form(uri, 'q' => ['ruby', 'perl'], 'max' => '50') +puts res.body +``` + +## Development + +After checking out the repo, run `bin/setup` to install dependencies. Then, run `rake test` to run the tests. You can also run `bin/console` for an interactive prompt that will allow you to experiment. + +To install this gem onto your local machine, run `bundle exec rake install`. To release a new version, update the version number in `version.rb`, and then run `bundle exec rake release`, which will create a git tag for the version, push git commits and tags, and push the `.gem` file to [rubygems.org](https://rubygems.org). + +## Contributing + +Bug reports and pull requests are welcome on GitHub at https://github.com/ruby/net-http. + diff --git a/vendor/bundle/ruby/3.2.0/gems/net-http-0.6.0/Rakefile b/vendor/bundle/ruby/3.2.0/gems/net-http-0.6.0/Rakefile new file mode 100644 index 000000000..5d512c89e --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/net-http-0.6.0/Rakefile @@ -0,0 +1,10 @@ +require "bundler/gem_tasks" +require "rake/testtask" + +Rake::TestTask.new(:test) do |t| + t.libs << "test/lib" + t.ruby_opts << "-rhelper" + t.test_files = FileList["test/**/test_*.rb"] +end + +task :default => :test diff --git a/vendor/bundle/ruby/3.2.0/gems/net-http-0.6.0/doc/net-http/examples.rdoc b/vendor/bundle/ruby/3.2.0/gems/net-http-0.6.0/doc/net-http/examples.rdoc new file mode 100644 index 000000000..c1366e7ad --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/net-http-0.6.0/doc/net-http/examples.rdoc @@ -0,0 +1,31 @@ +Examples here assume that net/http has been required +(which also requires +uri+): + + require 'net/http' + +Many code examples here use these example websites: + +- https://jsonplaceholder.typicode.com. +- http://example.com. + +Some examples also assume these variables: + + uri = URI('https://jsonplaceholder.typicode.com/') + uri.freeze # Examples may not modify. 
+ hostname = uri.hostname # => "jsonplaceholder.typicode.com" + path = uri.path # => "/" + port = uri.port # => 443 + +So that example requests may be written as: + + Net::HTTP.get(uri) + Net::HTTP.get(hostname, '/index.html') + Net::HTTP.start(hostname) do |http| + http.get('/todos/1') + http.get('/todos/2') + end + +An example that needs a modified URI first duplicates +uri+, then modifies the duplicate: + + _uri = uri.dup + _uri.path = '/todos/1' diff --git a/vendor/bundle/ruby/3.2.0/gems/net-http-0.6.0/doc/net-http/included_getters.rdoc b/vendor/bundle/ruby/3.2.0/gems/net-http-0.6.0/doc/net-http/included_getters.rdoc new file mode 100644 index 000000000..7ac327f4b --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/net-http-0.6.0/doc/net-http/included_getters.rdoc @@ -0,0 +1,3 @@ +This class also includes (indirectly) module Net::HTTPHeader, +which gives access to its +{methods for getting headers}[rdoc-ref:Net::HTTPHeader@Getters]. diff --git a/vendor/bundle/ruby/3.2.0/gems/net-http-0.6.0/lib/net/http.rb b/vendor/bundle/ruby/3.2.0/gems/net-http-0.6.0/lib/net/http.rb new file mode 100644 index 000000000..cea5cc518 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/net-http-0.6.0/lib/net/http.rb @@ -0,0 +1,2580 @@ +# frozen_string_literal: true +# +# = net/http.rb +# +# Copyright (c) 1999-2007 Yukihiro Matsumoto +# Copyright (c) 1999-2007 Minero Aoki +# Copyright (c) 2001 GOTOU Yuuzou +# +# Written and maintained by Minero Aoki . +# HTTPS support added by GOTOU Yuuzou . +# +# This file is derived from "http-access.rb". +# +# Documented by Minero Aoki; converted to RDoc by William Webber. +# +# This program is free software. You can re-distribute and/or +# modify this program under the same terms of ruby itself --- +# Ruby Distribution License or GNU General Public License. +# +# See Net::HTTP for an overview and examples. +# + +require 'net/protocol' +require 'uri' +require 'resolv' +autoload :OpenSSL, 'openssl' + +module Net #:nodoc: + + # :stopdoc: + class HTTPBadResponse < StandardError; end + class HTTPHeaderSyntaxError < StandardError; end + # :startdoc: + + # \Class \Net::HTTP provides a rich library that implements the client + # in a client-server model that uses the \HTTP request-response protocol. + # For information about \HTTP, see: + # + # - {Hypertext Transfer Protocol}[https://en.wikipedia.org/wiki/Hypertext_Transfer_Protocol]. + # - {Technical overview}[https://en.wikipedia.org/wiki/Hypertext_Transfer_Protocol#Technical_overview]. + # + # == About the Examples + # + # :include: doc/net-http/examples.rdoc + # + # == Strategies + # + # - If you will make only a few GET requests, + # consider using {OpenURI}[https://docs.ruby-lang.org/en/master/OpenURI.html]. + # - If you will make only a few requests of all kinds, + # consider using the various singleton convenience methods in this class. + # Each of the following methods automatically starts and finishes + # a {session}[rdoc-ref:Net::HTTP@Sessions] that sends a single request: + # + # # Return string response body. + # Net::HTTP.get(hostname, path) + # Net::HTTP.get(uri) + # + # # Write string response body to $stdout. + # Net::HTTP.get_print(hostname, path) + # Net::HTTP.get_print(uri) + # + # # Return response as Net::HTTPResponse object. 
+ # Net::HTTP.get_response(hostname, path) + # Net::HTTP.get_response(uri) + # data = '{"title": "foo", "body": "bar", "userId": 1}' + # Net::HTTP.post(uri, data) + # params = {title: 'foo', body: 'bar', userId: 1} + # Net::HTTP.post_form(uri, params) + # data = '{"title": "foo", "body": "bar", "userId": 1}' + # Net::HTTP.put(uri, data) + # + # - If performance is important, consider using sessions, which lower request overhead. + # This {session}[rdoc-ref:Net::HTTP@Sessions] has multiple requests for + # {HTTP methods}[https://en.wikipedia.org/wiki/Hypertext_Transfer_Protocol#Request_methods] + # and {WebDAV methods}[https://en.wikipedia.org/wiki/WebDAV#Implementation]: + # + # Net::HTTP.start(hostname) do |http| + # # Session started automatically before block execution. + # http.get(path) + # http.head(path) + # body = 'Some text' + # http.post(path, body) # Can also have a block. + # http.put(path, body) + # http.delete(path) + # http.options(path) + # http.trace(path) + # http.patch(path, body) # Can also have a block. + # http.copy(path) + # http.lock(path, body) + # http.mkcol(path, body) + # http.move(path) + # http.propfind(path, body) + # http.proppatch(path, body) + # http.unlock(path, body) + # # Session finished automatically at block exit. + # end + # + # The methods cited above are convenience methods that, via their few arguments, + # allow minimal control over the requests. + # For greater control, consider using {request objects}[rdoc-ref:Net::HTTPRequest]. + # + # == URIs + # + # On the internet, a URI + # ({Universal Resource Identifier}[https://en.wikipedia.org/wiki/Uniform_Resource_Identifier]) + # is a string that identifies a particular resource. + # It consists of some or all of: scheme, hostname, path, query, and fragment; + # see {URI syntax}[https://en.wikipedia.org/wiki/Uniform_Resource_Identifier#Syntax]. + # + # A Ruby {URI::Generic}[https://docs.ruby-lang.org/en/master/URI/Generic.html] object + # represents an internet URI. + # It provides, among others, methods + # +scheme+, +hostname+, +path+, +query+, and +fragment+. + # + # === Schemes + # + # An internet \URI has + # a {scheme}[https://en.wikipedia.org/wiki/List_of_URI_schemes]. + # + # The two schemes supported in \Net::HTTP are 'https' and 'http': + # + # uri.scheme # => "https" + # URI('http://example.com').scheme # => "http" + # + # === Hostnames + # + # A hostname identifies a server (host) to which requests may be sent: + # + # hostname = uri.hostname # => "jsonplaceholder.typicode.com" + # Net::HTTP.start(hostname) do |http| + # # Some HTTP stuff. + # end + # + # === Paths + # + # A host-specific path identifies a resource on the host: + # + # _uri = uri.dup + # _uri.path = '/todos/1' + # hostname = _uri.hostname + # path = _uri.path + # Net::HTTP.get(hostname, path) + # + # === Queries + # + # A host-specific query adds name/value pairs to the URI: + # + # _uri = uri.dup + # params = {userId: 1, completed: false} + # _uri.query = URI.encode_www_form(params) + # _uri # => # + # Net::HTTP.get(_uri) + # + # === Fragments + # + # A {URI fragment}[https://en.wikipedia.org/wiki/URI_fragment] has no effect + # in \Net::HTTP; + # the same data is returned, regardless of whether a fragment is included. + # + # == Request Headers + # + # Request headers may be used to pass additional information to the host, + # similar to arguments passed in a method call; + # each header is a name/value pair. 
+ # + # Each of the \Net::HTTP methods that sends a request to the host + # has optional argument +headers+, + # where the headers are expressed as a hash of field-name/value pairs: + # + # headers = {Accept: 'application/json', Connection: 'Keep-Alive'} + # Net::HTTP.get(uri, headers) + # + # See lists of both standard request fields and common request fields at + # {Request Fields}[https://en.wikipedia.org/wiki/List_of_HTTP_header_fields#Request_fields]. + # A host may also accept other custom fields. + # + # == \HTTP Sessions + # + # A _session_ is a connection between a server (host) and a client that: + # + # - Is begun by instance method Net::HTTP#start. + # - May contain any number of requests. + # - Is ended by instance method Net::HTTP#finish. + # + # See example sessions at {Strategies}[rdoc-ref:Net::HTTP@Strategies]. + # + # === Session Using \Net::HTTP.start + # + # If you have many requests to make to a single host (and port), + # consider using singleton method Net::HTTP.start with a block; + # the method handles the session automatically by: + # + # - Calling #start before block execution. + # - Executing the block. + # - Calling #finish after block execution. + # + # In the block, you can use these instance methods, + # each of which that sends a single request: + # + # - {HTTP methods}[https://en.wikipedia.org/wiki/Hypertext_Transfer_Protocol#Request_methods]: + # + # - #get, #request_get: GET. + # - #head, #request_head: HEAD. + # - #post, #request_post: POST. + # - #delete: DELETE. + # - #options: OPTIONS. + # - #trace: TRACE. + # - #patch: PATCH. + # + # - {WebDAV methods}[https://en.wikipedia.org/wiki/WebDAV#Implementation]: + # + # - #copy: COPY. + # - #lock: LOCK. + # - #mkcol: MKCOL. + # - #move: MOVE. + # - #propfind: PROPFIND. + # - #proppatch: PROPPATCH. + # - #unlock: UNLOCK. + # + # === Session Using \Net::HTTP.start and \Net::HTTP.finish + # + # You can manage a session manually using methods #start and #finish: + # + # http = Net::HTTP.new(hostname) + # http.start + # http.get('/todos/1') + # http.get('/todos/2') + # http.delete('/posts/1') + # http.finish # Needed to free resources. + # + # === Single-Request Session + # + # Certain convenience methods automatically handle a session by: + # + # - Creating an \HTTP object + # - Starting a session. + # - Sending a single request. + # - Finishing the session. + # - Destroying the object. + # + # Such methods that send GET requests: + # + # - ::get: Returns the string response body. + # - ::get_print: Writes the string response body to $stdout. + # - ::get_response: Returns a Net::HTTPResponse object. + # + # Such methods that send POST requests: + # + # - ::post: Posts data to the host. + # - ::post_form: Posts form data to the host. + # + # == \HTTP Requests and Responses + # + # Many of the methods above are convenience methods, + # each of which sends a request and returns a string + # without directly using \Net::HTTPRequest and \Net::HTTPResponse objects. + # + # You can, however, directly create a request object, send the request, + # and retrieve the response object; see: + # + # - Net::HTTPRequest. + # - Net::HTTPResponse. + # + # == Following Redirection + # + # Each returned response is an instance of a subclass of Net::HTTPResponse. + # See the {response class hierarchy}[rdoc-ref:Net::HTTPResponse@Response+Subclasses]. + # + # In particular, class Net::HTTPRedirection is the parent + # of all redirection classes. 
+ # This allows you to craft a case statement to handle redirections properly: + # + # def fetch(uri, limit = 10) + # # You should choose a better exception. + # raise ArgumentError, 'Too many HTTP redirects' if limit == 0 + # + # res = Net::HTTP.get_response(URI(uri)) + # case res + # when Net::HTTPSuccess # Any success class. + # res + # when Net::HTTPRedirection # Any redirection class. + # location = res['Location'] + # warn "Redirected to #{location}" + # fetch(location, limit - 1) + # else # Any other class. + # res.value + # end + # end + # + # fetch(uri) + # + # == Basic Authentication + # + # Basic authentication is performed according to + # {RFC2617}[http://www.ietf.org/rfc/rfc2617.txt]: + # + # req = Net::HTTP::Get.new(uri) + # req.basic_auth('user', 'pass') + # res = Net::HTTP.start(hostname) do |http| + # http.request(req) + # end + # + # == Streaming Response Bodies + # + # By default \Net::HTTP reads an entire response into memory. If you are + # handling large files or wish to implement a progress bar you can instead + # stream the body directly to an IO. + # + # Net::HTTP.start(hostname) do |http| + # req = Net::HTTP::Get.new(uri) + # http.request(req) do |res| + # open('t.tmp', 'w') do |f| + # res.read_body do |chunk| + # f.write chunk + # end + # end + # end + # end + # + # == HTTPS + # + # HTTPS is enabled for an \HTTP connection by Net::HTTP#use_ssl=: + # + # Net::HTTP.start(hostname, :use_ssl => true) do |http| + # req = Net::HTTP::Get.new(uri) + # res = http.request(req) + # end + # + # Or if you simply want to make a GET request, you may pass in a URI + # object that has an \HTTPS URL. \Net::HTTP automatically turns on TLS + # verification if the URI object has a 'https' URI scheme: + # + # uri # => # + # Net::HTTP.get(uri) + # + # == Proxy Server + # + # An \HTTP object can have + # a {proxy server}[https://en.wikipedia.org/wiki/Proxy_server]. + # + # You can create an \HTTP object with a proxy server + # using method Net::HTTP.new or method Net::HTTP.start. + # + # The proxy may be defined either by argument +p_addr+ + # or by environment variable 'http_proxy'. + # + # === Proxy Using Argument +p_addr+ as a \String + # + # When argument +p_addr+ is a string hostname, + # the returned +http+ has the given host as its proxy: + # + # http = Net::HTTP.new(hostname, nil, 'proxy.example') + # http.proxy? # => true + # http.proxy_from_env? # => false + # http.proxy_address # => "proxy.example" + # # These use default values. + # http.proxy_port # => 80 + # http.proxy_user # => nil + # http.proxy_pass # => nil + # + # The port, username, and password for the proxy may also be given: + # + # http = Net::HTTP.new(hostname, nil, 'proxy.example', 8000, 'pname', 'ppass') + # # => # + # http.proxy? # => true + # http.proxy_from_env? # => false + # http.proxy_address # => "proxy.example" + # http.proxy_port # => 8000 + # http.proxy_user # => "pname" + # http.proxy_pass # => "ppass" + # + # === Proxy Using 'ENV['http_proxy']' + # + # When environment variable 'http_proxy' + # is set to a \URI string, + # the returned +http+ will have the server at that URI as its proxy; + # note that the \URI string must have a protocol + # such as 'http' or 'https': + # + # ENV['http_proxy'] = 'http://example.com' + # http = Net::HTTP.new(hostname) + # http.proxy? # => true + # http.proxy_from_env? # => true + # http.proxy_address # => "example.com" + # # These use default values. 
+ # http.proxy_port # => 80 + # http.proxy_user # => nil + # http.proxy_pass # => nil + # + # The \URI string may include proxy username, password, and port number: + # + # ENV['http_proxy'] = 'http://pname:ppass@example.com:8000' + # http = Net::HTTP.new(hostname) + # http.proxy? # => true + # http.proxy_from_env? # => true + # http.proxy_address # => "example.com" + # http.proxy_port # => 8000 + # http.proxy_user # => "pname" + # http.proxy_pass # => "ppass" + # + # === Filtering Proxies + # + # With method Net::HTTP.new (but not Net::HTTP.start), + # you can use argument +p_no_proxy+ to filter proxies: + # + # - Reject a certain address: + # + # http = Net::HTTP.new('example.com', nil, 'proxy.example', 8000, 'pname', 'ppass', 'proxy.example') + # http.proxy_address # => nil + # + # - Reject certain domains or subdomains: + # + # http = Net::HTTP.new('example.com', nil, 'my.proxy.example', 8000, 'pname', 'ppass', 'proxy.example') + # http.proxy_address # => nil + # + # - Reject certain addresses and port combinations: + # + # http = Net::HTTP.new('example.com', nil, 'proxy.example', 8000, 'pname', 'ppass', 'proxy.example:1234') + # http.proxy_address # => "proxy.example" + # + # http = Net::HTTP.new('example.com', nil, 'proxy.example', 8000, 'pname', 'ppass', 'proxy.example:8000') + # http.proxy_address # => nil + # + # - Reject a list of the types above delimited using a comma: + # + # http = Net::HTTP.new('example.com', nil, 'proxy.example', 8000, 'pname', 'ppass', 'my.proxy,proxy.example:8000') + # http.proxy_address # => nil + # + # http = Net::HTTP.new('example.com', nil, 'my.proxy', 8000, 'pname', 'ppass', 'my.proxy,proxy.example:8000') + # http.proxy_address # => nil + # + # == Compression and Decompression + # + # \Net::HTTP does not compress the body of a request before sending. + # + # By default, \Net::HTTP adds header 'Accept-Encoding' + # to a new {request object}[rdoc-ref:Net::HTTPRequest]: + # + # Net::HTTP::Get.new(uri)['Accept-Encoding'] + # # => "gzip;q=1.0,deflate;q=0.6,identity;q=0.3" + # + # This requests the server to zip-encode the response body if there is one; + # the server is not required to do so. + # + # \Net::HTTP does not automatically decompress a response body + # if the response has header 'Content-Range'. + # + # Otherwise decompression (or not) depends on the value of header + # {Content-Encoding}[https://en.wikipedia.org/wiki/List_of_HTTP_header_fields#content-encoding-response-header]: + # + # - 'deflate', 'gzip', or 'x-gzip': + # decompresses the body and deletes the header. + # - 'none' or 'identity': + # does not decompress the body, but deletes the header. + # - Any other value: + # leaves the body and header unchanged. + # + # == What's Here + # + # First, what's elsewhere. Class Net::HTTP: + # + # - Inherits from {class Object}[https://docs.ruby-lang.org/en/master/Object.html#class-Object-label-What-27s+Here]. + # + # This is a categorized summary of methods and attributes. + # + # === \Net::HTTP Objects + # + # - {::new}[rdoc-ref:Net::HTTP.new]: + # Creates a new instance. + # - {#inspect}[rdoc-ref:Net::HTTP#inspect]: + # Returns a string representation of +self+. + # + # === Sessions + # + # - {::start}[rdoc-ref:Net::HTTP.start]: + # Begins a new session in a new \Net::HTTP object. + # - {#started?}[rdoc-ref:Net::HTTP#started?] + # (aliased as {#active?}[rdoc-ref:Net::HTTP#active?]): + # Returns whether in a session. + # - {#finish}[rdoc-ref:Net::HTTP#finish]: + # Ends an active session. 
+ # - {#start}[rdoc-ref:Net::HTTP#start]: + # Begins a new session in an existing \Net::HTTP object (+self+). + # + # === Connections + # + # - {:continue_timeout}[rdoc-ref:Net::HTTP#continue_timeout]: + # Returns the continue timeout. + # - {#continue_timeout=}[rdoc-ref:Net::HTTP#continue_timeout=]: + # Sets the continue timeout seconds. + # - {:keep_alive_timeout}[rdoc-ref:Net::HTTP#keep_alive_timeout]: + # Returns the keep-alive timeout. + # - {:keep_alive_timeout=}[rdoc-ref:Net::HTTP#keep_alive_timeout=]: + # Sets the keep-alive timeout. + # - {:max_retries}[rdoc-ref:Net::HTTP#max_retries]: + # Returns the maximum retries. + # - {#max_retries=}[rdoc-ref:Net::HTTP#max_retries=]: + # Sets the maximum retries. + # - {:open_timeout}[rdoc-ref:Net::HTTP#open_timeout]: + # Returns the open timeout. + # - {:open_timeout=}[rdoc-ref:Net::HTTP#open_timeout=]: + # Sets the open timeout. + # - {:read_timeout}[rdoc-ref:Net::HTTP#read_timeout]: + # Returns the open timeout. + # - {:read_timeout=}[rdoc-ref:Net::HTTP#read_timeout=]: + # Sets the read timeout. + # - {:ssl_timeout}[rdoc-ref:Net::HTTP#ssl_timeout]: + # Returns the ssl timeout. + # - {:ssl_timeout=}[rdoc-ref:Net::HTTP#ssl_timeout=]: + # Sets the ssl timeout. + # - {:write_timeout}[rdoc-ref:Net::HTTP#write_timeout]: + # Returns the write timeout. + # - {write_timeout=}[rdoc-ref:Net::HTTP#write_timeout=]: + # Sets the write timeout. + # + # === Requests + # + # - {::get}[rdoc-ref:Net::HTTP.get]: + # Sends a GET request and returns the string response body. + # - {::get_print}[rdoc-ref:Net::HTTP.get_print]: + # Sends a GET request and write the string response body to $stdout. + # - {::get_response}[rdoc-ref:Net::HTTP.get_response]: + # Sends a GET request and returns a response object. + # - {::post_form}[rdoc-ref:Net::HTTP.post_form]: + # Sends a POST request with form data and returns a response object. + # - {::post}[rdoc-ref:Net::HTTP.post]: + # Sends a POST request with data and returns a response object. + # - {::put}[rdoc-ref:Net::HTTP.put]: + # Sends a PUT request with data and returns a response object. + # - {#copy}[rdoc-ref:Net::HTTP#copy]: + # Sends a COPY request and returns a response object. + # - {#delete}[rdoc-ref:Net::HTTP#delete]: + # Sends a DELETE request and returns a response object. + # - {#get}[rdoc-ref:Net::HTTP#get]: + # Sends a GET request and returns a response object. + # - {#head}[rdoc-ref:Net::HTTP#head]: + # Sends a HEAD request and returns a response object. + # - {#lock}[rdoc-ref:Net::HTTP#lock]: + # Sends a LOCK request and returns a response object. + # - {#mkcol}[rdoc-ref:Net::HTTP#mkcol]: + # Sends a MKCOL request and returns a response object. + # - {#move}[rdoc-ref:Net::HTTP#move]: + # Sends a MOVE request and returns a response object. + # - {#options}[rdoc-ref:Net::HTTP#options]: + # Sends a OPTIONS request and returns a response object. + # - {#patch}[rdoc-ref:Net::HTTP#patch]: + # Sends a PATCH request and returns a response object. + # - {#post}[rdoc-ref:Net::HTTP#post]: + # Sends a POST request and returns a response object. + # - {#propfind}[rdoc-ref:Net::HTTP#propfind]: + # Sends a PROPFIND request and returns a response object. + # - {#proppatch}[rdoc-ref:Net::HTTP#proppatch]: + # Sends a PROPPATCH request and returns a response object. + # - {#put}[rdoc-ref:Net::HTTP#put]: + # Sends a PUT request and returns a response object. + # - {#request}[rdoc-ref:Net::HTTP#request]: + # Sends a request and returns a response object. 
+ # - {#request_get}[rdoc-ref:Net::HTTP#request_get] + # (aliased as {#get2}[rdoc-ref:Net::HTTP#get2]): + # Sends a GET request and forms a response object; + # if a block given, calls the block with the object, + # otherwise returns the object. + # - {#request_head}[rdoc-ref:Net::HTTP#request_head] + # (aliased as {#head2}[rdoc-ref:Net::HTTP#head2]): + # Sends a HEAD request and forms a response object; + # if a block given, calls the block with the object, + # otherwise returns the object. + # - {#request_post}[rdoc-ref:Net::HTTP#request_post] + # (aliased as {#post2}[rdoc-ref:Net::HTTP#post2]): + # Sends a POST request and forms a response object; + # if a block given, calls the block with the object, + # otherwise returns the object. + # - {#send_request}[rdoc-ref:Net::HTTP#send_request]: + # Sends a request and returns a response object. + # - {#trace}[rdoc-ref:Net::HTTP#trace]: + # Sends a TRACE request and returns a response object. + # - {#unlock}[rdoc-ref:Net::HTTP#unlock]: + # Sends an UNLOCK request and returns a response object. + # + # === Responses + # + # - {:close_on_empty_response}[rdoc-ref:Net::HTTP#close_on_empty_response]: + # Returns whether to close connection on empty response. + # - {:close_on_empty_response=}[rdoc-ref:Net::HTTP#close_on_empty_response=]: + # Sets whether to close connection on empty response. + # - {:ignore_eof}[rdoc-ref:Net::HTTP#ignore_eof]: + # Returns whether to ignore end-of-file when reading a response body + # with Content-Length headers. + # - {:ignore_eof=}[rdoc-ref:Net::HTTP#ignore_eof=]: + # Sets whether to ignore end-of-file when reading a response body + # with Content-Length headers. + # - {:response_body_encoding}[rdoc-ref:Net::HTTP#response_body_encoding]: + # Returns the encoding to use for the response body. + # - {#response_body_encoding=}[rdoc-ref:Net::HTTP#response_body_encoding=]: + # Sets the response body encoding. + # + # === Proxies + # + # - {:proxy_address}[rdoc-ref:Net::HTTP#proxy_address]: + # Returns the proxy address. + # - {:proxy_address=}[rdoc-ref:Net::HTTP#proxy_address=]: + # Sets the proxy address. + # - {::proxy_class?}[rdoc-ref:Net::HTTP.proxy_class?]: + # Returns whether +self+ is a proxy class. + # - {#proxy?}[rdoc-ref:Net::HTTP#proxy?]: + # Returns whether +self+ has a proxy. + # - {#proxy_address}[rdoc-ref:Net::HTTP#proxy_address] + # (aliased as {#proxyaddr}[rdoc-ref:Net::HTTP#proxyaddr]): + # Returns the proxy address. + # - {#proxy_from_env?}[rdoc-ref:Net::HTTP#proxy_from_env?]: + # Returns whether the proxy is taken from an environment variable. + # - {:proxy_from_env=}[rdoc-ref:Net::HTTP#proxy_from_env=]: + # Sets whether the proxy is to be taken from an environment variable. + # - {:proxy_pass}[rdoc-ref:Net::HTTP#proxy_pass]: + # Returns the proxy password. + # - {:proxy_pass=}[rdoc-ref:Net::HTTP#proxy_pass=]: + # Sets the proxy password. + # - {:proxy_port}[rdoc-ref:Net::HTTP#proxy_port]: + # Returns the proxy port. + # - {:proxy_port=}[rdoc-ref:Net::HTTP#proxy_port=]: + # Sets the proxy port. + # - {#proxy_user}[rdoc-ref:Net::HTTP#proxy_user]: + # Returns the proxy user name. + # - {:proxy_user=}[rdoc-ref:Net::HTTP#proxy_user=]: + # Sets the proxy user. + # + # === Security + # + # - {:ca_file}[rdoc-ref:Net::HTTP#ca_file]: + # Returns the path to a CA certification file. + # - {:ca_file=}[rdoc-ref:Net::HTTP#ca_file=]: + # Sets the path to a CA certification file. + # - {:ca_path}[rdoc-ref:Net::HTTP#ca_path]: + # Returns the path of to CA directory containing certification files. 
+ # - {:ca_path=}[rdoc-ref:Net::HTTP#ca_path=]: + # Sets the path of to CA directory containing certification files. + # - {:cert}[rdoc-ref:Net::HTTP#cert]: + # Returns the OpenSSL::X509::Certificate object to be used for client certification. + # - {:cert=}[rdoc-ref:Net::HTTP#cert=]: + # Sets the OpenSSL::X509::Certificate object to be used for client certification. + # - {:cert_store}[rdoc-ref:Net::HTTP#cert_store]: + # Returns the X509::Store to be used for verifying peer certificate. + # - {:cert_store=}[rdoc-ref:Net::HTTP#cert_store=]: + # Sets the X509::Store to be used for verifying peer certificate. + # - {:ciphers}[rdoc-ref:Net::HTTP#ciphers]: + # Returns the available SSL ciphers. + # - {:ciphers=}[rdoc-ref:Net::HTTP#ciphers=]: + # Sets the available SSL ciphers. + # - {:extra_chain_cert}[rdoc-ref:Net::HTTP#extra_chain_cert]: + # Returns the extra X509 certificates to be added to the certificate chain. + # - {:extra_chain_cert=}[rdoc-ref:Net::HTTP#extra_chain_cert=]: + # Sets the extra X509 certificates to be added to the certificate chain. + # - {:key}[rdoc-ref:Net::HTTP#key]: + # Returns the OpenSSL::PKey::RSA or OpenSSL::PKey::DSA object. + # - {:key=}[rdoc-ref:Net::HTTP#key=]: + # Sets the OpenSSL::PKey::RSA or OpenSSL::PKey::DSA object. + # - {:max_version}[rdoc-ref:Net::HTTP#max_version]: + # Returns the maximum SSL version. + # - {:max_version=}[rdoc-ref:Net::HTTP#max_version=]: + # Sets the maximum SSL version. + # - {:min_version}[rdoc-ref:Net::HTTP#min_version]: + # Returns the minimum SSL version. + # - {:min_version=}[rdoc-ref:Net::HTTP#min_version=]: + # Sets the minimum SSL version. + # - {#peer_cert}[rdoc-ref:Net::HTTP#peer_cert]: + # Returns the X509 certificate chain for the session's socket peer. + # - {:ssl_version}[rdoc-ref:Net::HTTP#ssl_version]: + # Returns the SSL version. + # - {:ssl_version=}[rdoc-ref:Net::HTTP#ssl_version=]: + # Sets the SSL version. + # - {#use_ssl=}[rdoc-ref:Net::HTTP#use_ssl=]: + # Sets whether a new session is to use Transport Layer Security. + # - {#use_ssl?}[rdoc-ref:Net::HTTP#use_ssl?]: + # Returns whether +self+ uses SSL. + # - {:verify_callback}[rdoc-ref:Net::HTTP#verify_callback]: + # Returns the callback for the server certification verification. + # - {:verify_callback=}[rdoc-ref:Net::HTTP#verify_callback=]: + # Sets the callback for the server certification verification. + # - {:verify_depth}[rdoc-ref:Net::HTTP#verify_depth]: + # Returns the maximum depth for the certificate chain verification. + # - {:verify_depth=}[rdoc-ref:Net::HTTP#verify_depth=]: + # Sets the maximum depth for the certificate chain verification. + # - {:verify_hostname}[rdoc-ref:Net::HTTP#verify_hostname]: + # Returns the flags for server the certification verification at the beginning of the SSL/TLS session. + # - {:verify_hostname=}[rdoc-ref:Net::HTTP#verify_hostname=]: + # Sets he flags for server the certification verification at the beginning of the SSL/TLS session. + # - {:verify_mode}[rdoc-ref:Net::HTTP#verify_mode]: + # Returns the flags for server the certification verification at the beginning of the SSL/TLS session. + # - {:verify_mode=}[rdoc-ref:Net::HTTP#verify_mode=]: + # Sets the flags for server the certification verification at the beginning of the SSL/TLS session. + # + # === Addresses and Ports + # + # - {:address}[rdoc-ref:Net::HTTP#address]: + # Returns the string host name or host IP. + # - {::default_port}[rdoc-ref:Net::HTTP.default_port]: + # Returns integer 80, the default port to use for HTTP requests. 
+ # - {::http_default_port}[rdoc-ref:Net::HTTP.http_default_port]: + # Returns integer 80, the default port to use for HTTP requests. + # - {::https_default_port}[rdoc-ref:Net::HTTP.https_default_port]: + # Returns integer 443, the default port to use for HTTPS requests. + # - {#ipaddr}[rdoc-ref:Net::HTTP#ipaddr]: + # Returns the IP address for the connection. + # - {#ipaddr=}[rdoc-ref:Net::HTTP#ipaddr=]: + # Sets the IP address for the connection. + # - {:local_host}[rdoc-ref:Net::HTTP#local_host]: + # Returns the string local host used to establish the connection. + # - {:local_host=}[rdoc-ref:Net::HTTP#local_host=]: + # Sets the string local host used to establish the connection. + # - {:local_port}[rdoc-ref:Net::HTTP#local_port]: + # Returns the integer local port used to establish the connection. + # - {:local_port=}[rdoc-ref:Net::HTTP#local_port=]: + # Sets the integer local port used to establish the connection. + # - {:port}[rdoc-ref:Net::HTTP#port]: + # Returns the integer port number. + # + # === \HTTP Version + # + # - {::version_1_2?}[rdoc-ref:Net::HTTP.version_1_2?] + # (aliased as {::is_version_1_2?}[rdoc-ref:Net::HTTP.is_version_1_2?] + # and {::version_1_2}[rdoc-ref:Net::HTTP.version_1_2]): + # Returns true; retained for compatibility. + # + # === Debugging + # + # - {#set_debug_output}[rdoc-ref:Net::HTTP#set_debug_output]: + # Sets the output stream for debugging. + # + class HTTP < Protocol + + # :stopdoc: + VERSION = "0.6.0" + HTTPVersion = '1.1' + begin + require 'zlib' + HAVE_ZLIB=true + rescue LoadError + HAVE_ZLIB=false + end + # :startdoc: + + # Returns +true+; retained for compatibility. + def HTTP.version_1_2 + true + end + + # Returns +true+; retained for compatibility. + def HTTP.version_1_2? + true + end + + # Returns +false+; retained for compatibility. + def HTTP.version_1_1? #:nodoc: + false + end + + class << HTTP + alias is_version_1_1? version_1_1? #:nodoc: + alias is_version_1_2? version_1_2? #:nodoc: + end + + # :call-seq: + # Net::HTTP.get_print(hostname, path, port = 80) -> nil + # Net::HTTP:get_print(uri, headers = {}, port = uri.port) -> nil + # + # Like Net::HTTP.get, but writes the returned body to $stdout; + # returns +nil+. + def HTTP.get_print(uri_or_host, path_or_headers = nil, port = nil) + get_response(uri_or_host, path_or_headers, port) {|res| + res.read_body do |chunk| + $stdout.print chunk + end + } + nil + end + + # :call-seq: + # Net::HTTP.get(hostname, path, port = 80) -> body + # Net::HTTP:get(uri, headers = {}, port = uri.port) -> body + # + # Sends a GET request and returns the \HTTP response body as a string. + # + # With string arguments +hostname+ and +path+: + # + # hostname = 'jsonplaceholder.typicode.com' + # path = '/todos/1' + # puts Net::HTTP.get(hostname, path) + # + # Output: + # + # { + # "userId": 1, + # "id": 1, + # "title": "delectus aut autem", + # "completed": false + # } + # + # With URI object +uri+ and optional hash argument +headers+: + # + # uri = URI('https://jsonplaceholder.typicode.com/todos/1') + # headers = {'Content-type' => 'application/json; charset=UTF-8'} + # Net::HTTP.get(uri, headers) + # + # Related: + # + # - Net::HTTP::Get: request class for \HTTP method +GET+. + # - Net::HTTP#get: convenience method for \HTTP method +GET+. 
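+  #
+  # Note that ::get returns the response body regardless of the response
+  # status; to branch on the status, use ::get_response instead
+  # (a minimal sketch, using the +uri+ from the example conventions above):
+  #
+  #   res = Net::HTTP.get_response(uri)
+  #   puts res.body if res.is_a?(Net::HTTPSuccess)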
+ # + def HTTP.get(uri_or_host, path_or_headers = nil, port = nil) + get_response(uri_or_host, path_or_headers, port).body + end + + # :call-seq: + # Net::HTTP.get_response(hostname, path, port = 80) -> http_response + # Net::HTTP:get_response(uri, headers = {}, port = uri.port) -> http_response + # + # Like Net::HTTP.get, but returns a Net::HTTPResponse object + # instead of the body string. + def HTTP.get_response(uri_or_host, path_or_headers = nil, port = nil, &block) + if path_or_headers && !path_or_headers.is_a?(Hash) + host = uri_or_host + path = path_or_headers + new(host, port || HTTP.default_port).start {|http| + return http.request_get(path, &block) + } + else + uri = uri_or_host + headers = path_or_headers + start(uri.hostname, uri.port, + :use_ssl => uri.scheme == 'https') {|http| + return http.request_get(uri, headers, &block) + } + end + end + + # Posts data to a host; returns a Net::HTTPResponse object. + # + # Argument +url+ must be a URL; + # argument +data+ must be a string: + # + # _uri = uri.dup + # _uri.path = '/posts' + # data = '{"title": "foo", "body": "bar", "userId": 1}' + # headers = {'content-type': 'application/json'} + # res = Net::HTTP.post(_uri, data, headers) # => # + # puts res.body + # + # Output: + # + # { + # "title": "foo", + # "body": "bar", + # "userId": 1, + # "id": 101 + # } + # + # Related: + # + # - Net::HTTP::Post: request class for \HTTP method +POST+. + # - Net::HTTP#post: convenience method for \HTTP method +POST+. + # + def HTTP.post(url, data, header = nil) + start(url.hostname, url.port, + :use_ssl => url.scheme == 'https' ) {|http| + http.post(url, data, header) + } + end + + # Posts data to a host; returns a Net::HTTPResponse object. + # + # Argument +url+ must be a URI; + # argument +data+ must be a hash: + # + # _uri = uri.dup + # _uri.path = '/posts' + # data = {title: 'foo', body: 'bar', userId: 1} + # res = Net::HTTP.post_form(_uri, data) # => # + # puts res.body + # + # Output: + # + # { + # "title": "foo", + # "body": "bar", + # "userId": "1", + # "id": 101 + # } + # + def HTTP.post_form(url, params) + req = Post.new(url) + req.form_data = params + req.basic_auth url.user, url.password if url.user + start(url.hostname, url.port, + :use_ssl => url.scheme == 'https' ) {|http| + http.request(req) + } + end + + # Sends a PUT request to the server; returns a Net::HTTPResponse object. + # + # Argument +url+ must be a URL; + # argument +data+ must be a string: + # + # _uri = uri.dup + # _uri.path = '/posts' + # data = '{"title": "foo", "body": "bar", "userId": 1}' + # headers = {'content-type': 'application/json'} + # res = Net::HTTP.put(_uri, data, headers) # => # + # puts res.body + # + # Output: + # + # { + # "title": "foo", + # "body": "bar", + # "userId": 1, + # "id": 101 + # } + # + # Related: + # + # - Net::HTTP::Put: request class for \HTTP method +PUT+. + # - Net::HTTP#put: convenience method for \HTTP method +PUT+. 
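+  #
+  # As with ::post, the returned object is a Net::HTTPResponse whose class
+  # reflects the response status; a minimal sketch for inspecting it,
+  # reusing +_uri+, +data+, and +headers+ from the example above:
+  #
+  #   res = Net::HTTP.put(_uri, data, headers)
+  #   res.code  # status code as a string, e.g. "200"
+  #   res.class # a subclass of Net::HTTPResponse, e.g. Net::HTTPOK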
+ # + def HTTP.put(url, data, header = nil) + start(url.hostname, url.port, + :use_ssl => url.scheme == 'https' ) {|http| + http.put(url, data, header) + } + end + + # + # \HTTP session management + # + + # Returns integer +80+, the default port to use for \HTTP requests: + # + # Net::HTTP.default_port # => 80 + # + def HTTP.default_port + http_default_port() + end + + # Returns integer +80+, the default port to use for \HTTP requests: + # + # Net::HTTP.http_default_port # => 80 + # + def HTTP.http_default_port + 80 + end + + # Returns integer +443+, the default port to use for HTTPS requests: + # + # Net::HTTP.https_default_port # => 443 + # + def HTTP.https_default_port + 443 + end + + def HTTP.socket_type #:nodoc: obsolete + BufferedIO + end + + # :call-seq: + # HTTP.start(address, port = nil, p_addr = :ENV, p_port = nil, p_user = nil, p_pass = nil, opts) -> http + # HTTP.start(address, port = nil, p_addr = :ENV, p_port = nil, p_user = nil, p_pass = nil, opts) {|http| ... } -> object + # + # Creates a new \Net::HTTP object, +http+, via \Net::HTTP.new: + # + # - For arguments +address+ and +port+, see Net::HTTP.new. + # - For proxy-defining arguments +p_addr+ through +p_pass+, + # see {Proxy Server}[rdoc-ref:Net::HTTP@Proxy+Server]. + # - For argument +opts+, see below. + # + # With no block given: + # + # - Calls http.start with no block (see #start), + # which opens a TCP connection and \HTTP session. + # - Returns +http+. + # - The caller should call #finish to close the session: + # + # http = Net::HTTP.start(hostname) + # http.started? # => true + # http.finish + # http.started? # => false + # + # With a block given: + # + # - Calls http.start with the block (see #start), which: + # + # - Opens a TCP connection and \HTTP session. + # - Calls the block, + # which may make any number of requests to the host. + # - Closes the \HTTP session and TCP connection on block exit. + # - Returns the block's value +object+. + # + # - Returns +object+. + # + # Example: + # + # hostname = 'jsonplaceholder.typicode.com' + # Net::HTTP.start(hostname) do |http| + # puts http.get('/todos/1').body + # puts http.get('/todos/2').body + # end + # + # Output: + # + # { + # "userId": 1, + # "id": 1, + # "title": "delectus aut autem", + # "completed": false + # } + # { + # "userId": 1, + # "id": 2, + # "title": "quis ut nam facilis et officia qui", + # "completed": false + # } + # + # If the last argument given is a hash, it is the +opts+ hash, + # where each key is a method or accessor to be called, + # and its value is the value to be set. + # + # The keys may include: + # + # - #ca_file + # - #ca_path + # - #cert + # - #cert_store + # - #ciphers + # - #close_on_empty_response + # - +ipaddr+ (calls #ipaddr=) + # - #keep_alive_timeout + # - #key + # - #open_timeout + # - #read_timeout + # - #ssl_timeout + # - #ssl_version + # - +use_ssl+ (calls #use_ssl=) + # - #verify_callback + # - #verify_depth + # - #verify_mode + # - #write_timeout + # + # Note: If +port+ is +nil+ and opts[:use_ssl] is a truthy value, + # the value passed to +new+ is Net::HTTP.https_default_port, not +port+. 
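+  #
+  # A minimal sketch passing an +opts+ hash, using keys from the list above:
+  #
+  #   Net::HTTP.start(hostname, 443, use_ssl: true, open_timeout: 5, read_timeout: 5) do |http|
+  #     http.get('/todos/1')
+  #   end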
+ # + def HTTP.start(address, *arg, &block) # :yield: +http+ + arg.pop if opt = Hash.try_convert(arg[-1]) + port, p_addr, p_port, p_user, p_pass = *arg + p_addr = :ENV if arg.size < 2 + port = https_default_port if !port && opt && opt[:use_ssl] + http = new(address, port, p_addr, p_port, p_user, p_pass) + http.ipaddr = opt[:ipaddr] if opt && opt[:ipaddr] + + if opt + if opt[:use_ssl] + opt = {verify_mode: OpenSSL::SSL::VERIFY_PEER}.update(opt) + end + http.methods.grep(/\A(\w+)=\z/) do |meth| + key = $1.to_sym + opt.key?(key) or next + http.__send__(meth, opt[key]) + end + end + + http.start(&block) + end + + class << HTTP + alias newobj new # :nodoc: + end + + # Returns a new \Net::HTTP object +http+ + # (but does not open a TCP connection or \HTTP session). + # + # With only string argument +address+ given + # (and ENV['http_proxy'] undefined or +nil+), + # the returned +http+: + # + # - Has the given address. + # - Has the default port number, Net::HTTP.default_port (80). + # - Has no proxy. + # + # Example: + # + # http = Net::HTTP.new(hostname) + # # => # + # http.address # => "jsonplaceholder.typicode.com" + # http.port # => 80 + # http.proxy? # => false + # + # With integer argument +port+ also given, + # the returned +http+ has the given port: + # + # http = Net::HTTP.new(hostname, 8000) + # # => # + # http.port # => 8000 + # + # For proxy-defining arguments +p_addr+ through +p_no_proxy+, + # see {Proxy Server}[rdoc-ref:Net::HTTP@Proxy+Server]. + # + def HTTP.new(address, port = nil, p_addr = :ENV, p_port = nil, p_user = nil, p_pass = nil, p_no_proxy = nil, p_use_ssl = nil) + http = super address, port + + if proxy_class? then # from Net::HTTP::Proxy() + http.proxy_from_env = @proxy_from_env + http.proxy_address = @proxy_address + http.proxy_port = @proxy_port + http.proxy_user = @proxy_user + http.proxy_pass = @proxy_pass + http.proxy_use_ssl = @proxy_use_ssl + elsif p_addr == :ENV then + http.proxy_from_env = true + else + if p_addr && p_no_proxy && !URI::Generic.use_proxy?(address, address, port, p_no_proxy) + p_addr = nil + p_port = nil + end + http.proxy_address = p_addr + http.proxy_port = p_port || default_port + http.proxy_user = p_user + http.proxy_pass = p_pass + http.proxy_use_ssl = p_use_ssl + end + + http + end + + class << HTTP + # Allows to set the default configuration that will be used + # when creating a new connection. + # + # Example: + # + # Net::HTTP.default_configuration = { + # read_timeout: 1, + # write_timeout: 1 + # } + # http = Net::HTTP.new(hostname) + # http.open_timeout # => 60 + # http.read_timeout # => 1 + # http.write_timeout # => 1 + # + attr_accessor :default_configuration + end + + # Creates a new \Net::HTTP object for the specified server address, + # without opening the TCP connection or initializing the \HTTP session. + # The +address+ should be a DNS hostname or IP address. 
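+  #
+  # A minimal sketch of the object lifecycle (no connection is opened
+  # until #start is called; 'example.com' is a placeholder host):
+  #
+  #   http = Net::HTTP.new('example.com')
+  #   http.started? # => false
+  #   http.start
+  #   http.started? # => true
+  #   http.finish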
+ def initialize(address, port = nil) # :nodoc: + defaults = { + keep_alive_timeout: 2, + close_on_empty_response: false, + open_timeout: 60, + read_timeout: 60, + write_timeout: 60, + continue_timeout: nil, + max_retries: 1, + debug_output: nil, + response_body_encoding: false, + ignore_eof: true + } + options = defaults.merge(self.class.default_configuration || {}) + + @address = address + @port = (port || HTTP.default_port) + @ipaddr = nil + @local_host = nil + @local_port = nil + @curr_http_version = HTTPVersion + @keep_alive_timeout = options[:keep_alive_timeout] + @last_communicated = nil + @close_on_empty_response = options[:close_on_empty_response] + @socket = nil + @started = false + @open_timeout = options[:open_timeout] + @read_timeout = options[:read_timeout] + @write_timeout = options[:write_timeout] + @continue_timeout = options[:continue_timeout] + @max_retries = options[:max_retries] + @debug_output = options[:debug_output] + @response_body_encoding = options[:response_body_encoding] + @ignore_eof = options[:ignore_eof] + + @proxy_from_env = false + @proxy_uri = nil + @proxy_address = nil + @proxy_port = nil + @proxy_user = nil + @proxy_pass = nil + @proxy_use_ssl = nil + + @use_ssl = false + @ssl_context = nil + @ssl_session = nil + @sspi_enabled = false + SSL_IVNAMES.each do |ivname| + instance_variable_set ivname, nil + end + end + + # Returns a string representation of +self+: + # + # Net::HTTP.new(hostname).inspect + # # => "#" + # + def inspect + "#<#{self.class} #{@address}:#{@port} open=#{started?}>" + end + + # *WARNING* This method opens a serious security hole. + # Never use this method in production code. + # + # Sets the output stream for debugging: + # + # http = Net::HTTP.new(hostname) + # File.open('t.tmp', 'w') do |file| + # http.set_debug_output(file) + # http.start + # http.get('/nosuch/1') + # http.finish + # end + # puts File.read('t.tmp') + # + # Output: + # + # opening connection to jsonplaceholder.typicode.com:80... + # opened + # <- "GET /nosuch/1 HTTP/1.1\r\nAccept-Encoding: gzip;q=1.0,deflate;q=0.6,identity;q=0.3\r\nAccept: */*\r\nUser-Agent: Ruby\r\nHost: jsonplaceholder.typicode.com\r\n\r\n" + # -> "HTTP/1.1 404 Not Found\r\n" + # -> "Date: Mon, 12 Dec 2022 21:14:11 GMT\r\n" + # -> "Content-Type: application/json; charset=utf-8\r\n" + # -> "Content-Length: 2\r\n" + # -> "Connection: keep-alive\r\n" + # -> "X-Powered-By: Express\r\n" + # -> "X-Ratelimit-Limit: 1000\r\n" + # -> "X-Ratelimit-Remaining: 999\r\n" + # -> "X-Ratelimit-Reset: 1670879660\r\n" + # -> "Vary: Origin, Accept-Encoding\r\n" + # -> "Access-Control-Allow-Credentials: true\r\n" + # -> "Cache-Control: max-age=43200\r\n" + # -> "Pragma: no-cache\r\n" + # -> "Expires: -1\r\n" + # -> "X-Content-Type-Options: nosniff\r\n" + # -> "Etag: W/\"2-vyGp6PvFo4RvsFtPoIWeCReyIC8\"\r\n" + # -> "Via: 1.1 vegur\r\n" + # -> "CF-Cache-Status: MISS\r\n" + # -> "Server-Timing: cf-q-config;dur=1.3000000762986e-05\r\n" + # -> "Report-To: {\"endpoints\":[{\"url\":\"https:\\/\\/a.nel.cloudflare.com\\/report\\/v3?s=yOr40jo%2BwS1KHzhTlVpl54beJ5Wx2FcG4gGV0XVrh3X9OlR5q4drUn2dkt5DGO4GDcE%2BVXT7CNgJvGs%2BZleIyMu8CLieFiDIvOviOY3EhHg94m0ZNZgrEdpKD0S85S507l1vsEwEHkoTm%2Ff19SiO\"}],\"group\":\"cf-nel\",\"max_age\":604800}\r\n" + # -> "NEL: {\"success_fraction\":0,\"report_to\":\"cf-nel\",\"max_age\":604800}\r\n" + # -> "Server: cloudflare\r\n" + # -> "CF-RAY: 778977dc484ce591-DFW\r\n" + # -> "alt-svc: h3=\":443\"; ma=86400, h3-29=\":443\"; ma=86400\r\n" + # -> "\r\n" + # reading 2 bytes... 
+ # -> "{}" + # read 2 bytes + # Conn keep-alive + # + def set_debug_output(output) + warn 'Net::HTTP#set_debug_output called after HTTP started', uplevel: 1 if started? + @debug_output = output + end + + # Returns the string host name or host IP given as argument +address+ in ::new. + attr_reader :address + + # Returns the integer port number given as argument +port+ in ::new. + attr_reader :port + + # Sets or returns the string local host used to establish the connection; + # initially +nil+. + attr_accessor :local_host + + # Sets or returns the integer local port used to establish the connection; + # initially +nil+. + attr_accessor :local_port + + # Returns the encoding to use for the response body; + # see #response_body_encoding=. + attr_reader :response_body_encoding + + # Sets the encoding to be used for the response body; + # returns the encoding. + # + # The given +value+ may be: + # + # - An Encoding object. + # - The name of an encoding. + # - An alias for an encoding name. + # + # See {Encoding}[https://docs.ruby-lang.org/en/master/Encoding.html]. + # + # Examples: + # + # http = Net::HTTP.new(hostname) + # http.response_body_encoding = Encoding::US_ASCII # => # + # http.response_body_encoding = 'US-ASCII' # => "US-ASCII" + # http.response_body_encoding = 'ASCII' # => "ASCII" + # + def response_body_encoding=(value) + value = Encoding.find(value) if value.is_a?(String) + @response_body_encoding = value + end + + # Sets whether to determine the proxy from environment variable + # 'ENV['http_proxy']'; + # see {Proxy Using ENV['http_proxy']}[rdoc-ref:Net::HTTP@Proxy+Using+-27ENV-5B-27http_proxy-27-5D-27]. + attr_writer :proxy_from_env + + # Sets the proxy address; + # see {Proxy Server}[rdoc-ref:Net::HTTP@Proxy+Server]. + attr_writer :proxy_address + + # Sets the proxy port; + # see {Proxy Server}[rdoc-ref:Net::HTTP@Proxy+Server]. + attr_writer :proxy_port + + # Sets the proxy user; + # see {Proxy Server}[rdoc-ref:Net::HTTP@Proxy+Server]. + attr_writer :proxy_user + + # Sets the proxy password; + # see {Proxy Server}[rdoc-ref:Net::HTTP@Proxy+Server]. + attr_writer :proxy_pass + attr_writer :proxy_use_ssl + + # Returns the IP address for the connection. + # + # If the session has not been started, + # returns the value set by #ipaddr=, + # or +nil+ if it has not been set: + # + # http = Net::HTTP.new(hostname) + # http.ipaddr # => nil + # http.ipaddr = '172.67.155.76' + # http.ipaddr # => "172.67.155.76" + # + # If the session has been started, + # returns the IP address from the socket: + # + # http = Net::HTTP.new(hostname) + # http.start + # http.ipaddr # => "172.67.155.76" + # http.finish + # + def ipaddr + started? ? @socket.io.peeraddr[3] : @ipaddr + end + + # Sets the IP address for the connection: + # + # http = Net::HTTP.new(hostname) + # http.ipaddr # => nil + # http.ipaddr = '172.67.155.76' + # http.ipaddr # => "172.67.155.76" + # + # The IP address may not be set if the session has been started. + def ipaddr=(addr) + raise IOError, "ipaddr value changed, but session already started" if started? + @ipaddr = addr + end + + # Sets or returns the numeric (\Integer or \Float) number of seconds + # to wait for a connection to open; + # initially 60. + # If the connection is not made in the given interval, + # an exception is raised. + attr_accessor :open_timeout + + # Returns the numeric (\Integer or \Float) number of seconds + # to wait for one block to be read (via one read(2) call); + # see #read_timeout=. 
+ attr_reader :read_timeout + + # Returns the numeric (\Integer or \Float) number of seconds + # to wait for one block to be written (via one write(2) call); + # see #write_timeout=. + attr_reader :write_timeout + + # Sets the maximum number of times to retry an idempotent request in case of + # \Net::ReadTimeout, IOError, EOFError, Errno::ECONNRESET, + # Errno::ECONNABORTED, Errno::EPIPE, OpenSSL::SSL::SSLError, + # Timeout::Error. + # The initial value is 1. + # + # Argument +retries+ must be a non-negative numeric value: + # + # http = Net::HTTP.new(hostname) + # http.max_retries = 2 # => 2 + # http.max_retries # => 2 + # + def max_retries=(retries) + retries = retries.to_int + if retries < 0 + raise ArgumentError, 'max_retries should be non-negative integer number' + end + @max_retries = retries + end + + # Returns the maximum number of times to retry an idempotent request; + # see #max_retries=. + attr_reader :max_retries + + # Sets the read timeout, in seconds, for +self+ to integer +sec+; + # the initial value is 60. + # + # Argument +sec+ must be a non-negative numeric value: + # + # http = Net::HTTP.new(hostname) + # http.read_timeout # => 60 + # http.get('/todos/1') # => # + # http.read_timeout = 0 + # http.get('/todos/1') # Raises Net::ReadTimeout. + # + def read_timeout=(sec) + @socket.read_timeout = sec if @socket + @read_timeout = sec + end + + # Sets the write timeout, in seconds, for +self+ to integer +sec+; + # the initial value is 60. + # + # Argument +sec+ must be a non-negative numeric value: + # + # _uri = uri.dup + # _uri.path = '/posts' + # body = 'bar' * 200000 + # data = < 60 + # http.post(_uri.path, data, headers) + # # => # + # http.write_timeout = 0 + # http.post(_uri.path, data, headers) # Raises Net::WriteTimeout. + # + def write_timeout=(sec) + @socket.write_timeout = sec if @socket + @write_timeout = sec + end + + # Returns the continue timeout value; + # see continue_timeout=. + attr_reader :continue_timeout + + # Sets the continue timeout value, + # which is the number of seconds to wait for an expected 100 Continue response. + # If the \HTTP object does not receive a response in this many seconds + # it sends the request body. + def continue_timeout=(sec) + @socket.continue_timeout = sec if @socket + @continue_timeout = sec + end + + # Sets or returns the numeric (\Integer or \Float) number of seconds + # to keep the connection open after a request is sent; + # initially 2. + # If a new request is made during the given interval, + # the still-open connection is used; + # otherwise the connection will have been closed + # and a new connection is opened. + attr_accessor :keep_alive_timeout + + # Sets or returns whether to ignore end-of-file when reading a response body + # with Content-Length headers; + # initially +true+. + attr_accessor :ignore_eof + + # Returns +true+ if the \HTTP session has been started: + # + # http = Net::HTTP.new(hostname) + # http.started? # => false + # http.start + # http.started? # => true + # http.finish # => nil + # http.started? # => false + # + # Net::HTTP.start(hostname) do |http| + # http.started? + # end # => true + # http.started? # => false + # + def started? + @started + end + + alias active? started? #:nodoc: obsolete + + # Sets or returns whether to close the connection when the response is empty; + # initially +false+. + attr_accessor :close_on_empty_response + + # Returns +true+ if +self+ uses SSL, +false+ otherwise. + # See Net::HTTP#use_ssl=. + def use_ssl? 
+ @use_ssl + end + + # Sets whether a new session is to use + # {Transport Layer Security}[https://en.wikipedia.org/wiki/Transport_Layer_Security]: + # + # Raises IOError if attempting to change during a session. + # + # Raises OpenSSL::SSL::SSLError if the port is not an HTTPS port. + def use_ssl=(flag) + flag = flag ? true : false + if started? and @use_ssl != flag + raise IOError, "use_ssl value changed, but session already started" + end + @use_ssl = flag + end + + SSL_ATTRIBUTES = [ + :ca_file, + :ca_path, + :cert, + :cert_store, + :ciphers, + :extra_chain_cert, + :key, + :ssl_timeout, + :ssl_version, + :min_version, + :max_version, + :verify_callback, + :verify_depth, + :verify_mode, + :verify_hostname, + ] # :nodoc: + + SSL_IVNAMES = SSL_ATTRIBUTES.map { |a| "@#{a}".to_sym } # :nodoc: + + # Sets or returns the path to a CA certification file in PEM format. + attr_accessor :ca_file + + # Sets or returns the path of to CA directory + # containing certification files in PEM format. + attr_accessor :ca_path + + # Sets or returns the OpenSSL::X509::Certificate object + # to be used for client certification. + attr_accessor :cert + + # Sets or returns the X509::Store to be used for verifying peer certificate. + attr_accessor :cert_store + + # Sets or returns the available SSL ciphers. + # See {OpenSSL::SSL::SSLContext#ciphers=}[https://docs.ruby-lang.org/en/master/OpenSSL/SSL/SSLContext.html#method-i-ciphers-3D]. + attr_accessor :ciphers + + # Sets or returns the extra X509 certificates to be added to the certificate chain. + # See {OpenSSL::SSL::SSLContext#add_certificate}[https://docs.ruby-lang.org/en/master/OpenSSL/SSL/SSLContext.html#method-i-add_certificate]. + attr_accessor :extra_chain_cert + + # Sets or returns the OpenSSL::PKey::RSA or OpenSSL::PKey::DSA object. + attr_accessor :key + + # Sets or returns the SSL timeout seconds. + attr_accessor :ssl_timeout + + # Sets or returns the SSL version. + # See {OpenSSL::SSL::SSLContext#ssl_version=}[https://docs.ruby-lang.org/en/master/OpenSSL/SSL/SSLContext.html#method-i-ssl_version-3D]. + attr_accessor :ssl_version + + # Sets or returns the minimum SSL version. + # See {OpenSSL::SSL::SSLContext#min_version=}[https://docs.ruby-lang.org/en/master/OpenSSL/SSL/SSLContext.html#method-i-min_version-3D]. + attr_accessor :min_version + + # Sets or returns the maximum SSL version. + # See {OpenSSL::SSL::SSLContext#max_version=}[https://docs.ruby-lang.org/en/master/OpenSSL/SSL/SSLContext.html#method-i-max_version-3D]. + attr_accessor :max_version + + # Sets or returns the callback for the server certification verification. + attr_accessor :verify_callback + + # Sets or returns the maximum depth for the certificate chain verification. + attr_accessor :verify_depth + + # Sets or returns the flags for server the certification verification + # at the beginning of the SSL/TLS session. + # OpenSSL::SSL::VERIFY_NONE or OpenSSL::SSL::VERIFY_PEER are acceptable. + attr_accessor :verify_mode + + # Sets or returns whether to verify that the server certificate is valid + # for the hostname. + # See {OpenSSL::SSL::SSLContext#verify_hostname=}[https://docs.ruby-lang.org/en/master/OpenSSL/SSL/SSLContext.html#attribute-i-verify_mode]. + attr_accessor :verify_hostname + + # Returns the X509 certificate chain (an array of strings) + # for the session's socket peer, + # or +nil+ if none. + def peer_cert + if not use_ssl? or not @socket + return nil + end + @socket.io.peer_cert + end + + # Starts an \HTTP session. 
+ # + # Without a block, returns +self+: + # + # http = Net::HTTP.new(hostname) + # # => # + # http.start + # # => # + # http.started? # => true + # http.finish + # + # With a block, calls the block with +self+, + # finishes the session when the block exits, + # and returns the block's value: + # + # http.start do |http| + # http + # end + # # => # + # http.started? # => false + # + def start # :yield: http + raise IOError, 'HTTP session already opened' if @started + if block_given? + begin + do_start + return yield(self) + ensure + do_finish + end + end + do_start + self + end + + def do_start + connect + @started = true + end + private :do_start + + def connect + if use_ssl? + # reference early to load OpenSSL before connecting, + # as OpenSSL may take time to load. + @ssl_context = OpenSSL::SSL::SSLContext.new + end + + if proxy? then + conn_addr = proxy_address + conn_port = proxy_port + else + conn_addr = conn_address + conn_port = port + end + + debug "opening connection to #{conn_addr}:#{conn_port}..." + s = Timeout.timeout(@open_timeout, Net::OpenTimeout) { + begin + TCPSocket.open(conn_addr, conn_port, @local_host, @local_port) + rescue => e + raise e, "Failed to open TCP connection to " + + "#{conn_addr}:#{conn_port} (#{e.message})" + end + } + s.setsockopt(Socket::IPPROTO_TCP, Socket::TCP_NODELAY, 1) + debug "opened" + if use_ssl? + if proxy? + if @proxy_use_ssl + proxy_sock = OpenSSL::SSL::SSLSocket.new(s) + ssl_socket_connect(proxy_sock, @open_timeout) + else + proxy_sock = s + end + proxy_sock = BufferedIO.new(proxy_sock, read_timeout: @read_timeout, + write_timeout: @write_timeout, + continue_timeout: @continue_timeout, + debug_output: @debug_output) + buf = +"CONNECT #{conn_address}:#{@port} HTTP/#{HTTPVersion}\r\n" \ + "Host: #{@address}:#{@port}\r\n" + if proxy_user + credential = ["#{proxy_user}:#{proxy_pass}"].pack('m0') + buf << "Proxy-Authorization: Basic #{credential}\r\n" + end + buf << "\r\n" + proxy_sock.write(buf) + HTTPResponse.read_new(proxy_sock).value + # assuming nothing left in buffers after successful CONNECT response + end + + ssl_parameters = Hash.new + iv_list = instance_variables + SSL_IVNAMES.each_with_index do |ivname, i| + if iv_list.include?(ivname) + value = instance_variable_get(ivname) + unless value.nil? + ssl_parameters[SSL_ATTRIBUTES[i]] = value + end + end + end + @ssl_context.set_params(ssl_parameters) + unless @ssl_context.session_cache_mode.nil? # a dummy method on JRuby + @ssl_context.session_cache_mode = + OpenSSL::SSL::SSLContext::SESSION_CACHE_CLIENT | + OpenSSL::SSL::SSLContext::SESSION_CACHE_NO_INTERNAL_STORE + end + if @ssl_context.respond_to?(:session_new_cb) # not implemented under JRuby + @ssl_context.session_new_cb = proc {|sock, sess| @ssl_session = sess } + end + + # Still do the post_connection_check below even if connecting + # to IP address + verify_hostname = @ssl_context.verify_hostname + + # Server Name Indication (SNI) RFC 3546/6066 + case @address + when Resolv::IPv4::Regex, Resolv::IPv6::Regex + # don't set SNI, as IP addresses in SNI is not valid + # per RFC 6066, section 3. + + # Avoid openssl warning + @ssl_context.verify_hostname = false + else + ssl_host_address = @address + end + + debug "starting SSL for #{conn_addr}:#{conn_port}..." 
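+        # Wrap the plain TCP socket in an SSL socket, set the SNI hostname
+        # chosen above (if any), reuse a cached TLS session when it has not
+        # expired, and verify the peer certificate after the handshake.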
+ s = OpenSSL::SSL::SSLSocket.new(s, @ssl_context) + s.sync_close = true + s.hostname = ssl_host_address if s.respond_to?(:hostname=) && ssl_host_address + + if @ssl_session and + Process.clock_gettime(Process::CLOCK_REALTIME) < @ssl_session.time.to_f + @ssl_session.timeout + s.session = @ssl_session + end + ssl_socket_connect(s, @open_timeout) + if (@ssl_context.verify_mode != OpenSSL::SSL::VERIFY_NONE) && verify_hostname + s.post_connection_check(@address) + end + debug "SSL established, protocol: #{s.ssl_version}, cipher: #{s.cipher[0]}" + end + @socket = BufferedIO.new(s, read_timeout: @read_timeout, + write_timeout: @write_timeout, + continue_timeout: @continue_timeout, + debug_output: @debug_output) + @last_communicated = nil + on_connect + rescue => exception + if s + debug "Conn close because of connect error #{exception}" + s.close + end + raise + end + private :connect + + def on_connect + end + private :on_connect + + # Finishes the \HTTP session: + # + # http = Net::HTTP.new(hostname) + # http.start + # http.started? # => true + # http.finish # => nil + # http.started? # => false + # + # Raises IOError if not in a session. + def finish + raise IOError, 'HTTP session not yet started' unless started? + do_finish + end + + def do_finish + @started = false + @socket.close if @socket + @socket = nil + end + private :do_finish + + # + # proxy + # + + public + + # no proxy + @is_proxy_class = false + @proxy_from_env = false + @proxy_addr = nil + @proxy_port = nil + @proxy_user = nil + @proxy_pass = nil + @proxy_use_ssl = nil + + # Creates an \HTTP proxy class which behaves like \Net::HTTP, but + # performs all access via the specified proxy. + # + # This class is obsolete. You may pass these same parameters directly to + # \Net::HTTP.new. See Net::HTTP.new for details of the arguments. + def HTTP.Proxy(p_addr = :ENV, p_port = nil, p_user = nil, p_pass = nil, p_use_ssl = nil) #:nodoc: + return self unless p_addr + + Class.new(self) { + @is_proxy_class = true + + if p_addr == :ENV then + @proxy_from_env = true + @proxy_address = nil + @proxy_port = nil + else + @proxy_from_env = false + @proxy_address = p_addr + @proxy_port = p_port || default_port + end + + @proxy_user = p_user + @proxy_pass = p_pass + @proxy_use_ssl = p_use_ssl + } + end + + class << HTTP + # Returns true if self is a class which was created by HTTP::Proxy. + def proxy_class? + defined?(@is_proxy_class) ? @is_proxy_class : false + end + + # Returns the address of the proxy host, or +nil+ if none; + # see Net::HTTP@Proxy+Server. + attr_reader :proxy_address + + # Returns the port number of the proxy host, or +nil+ if none; + # see Net::HTTP@Proxy+Server. + attr_reader :proxy_port + + # Returns the user name for accessing the proxy, or +nil+ if none; + # see Net::HTTP@Proxy+Server. + attr_reader :proxy_user + + # Returns the password for accessing the proxy, or +nil+ if none; + # see Net::HTTP@Proxy+Server. + attr_reader :proxy_pass + + # Use SSL when talking to the proxy. If Net::HTTP does not use a proxy, nil. + attr_reader :proxy_use_ssl + end + + # Returns +true+ if a proxy server is defined, +false+ otherwise; + # see {Proxy Server}[rdoc-ref:Net::HTTP@Proxy+Server]. + def proxy? + !!(@proxy_from_env ? proxy_uri : @proxy_address) + end + + # Returns +true+ if the proxy server is defined in the environment, + # +false+ otherwise; + # see {Proxy Server}[rdoc-ref:Net::HTTP@Proxy+Server]. + def proxy_from_env? + @proxy_from_env + end + + # The proxy URI determined from the environment for this connection. 
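+    # Memoizes the result of URI#find_proxy; +false+ is cached to record
+    # that no proxy applies, so the environment is consulted only once.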
+ def proxy_uri # :nodoc: + return if @proxy_uri == false + @proxy_uri ||= URI::HTTP.new( + "http", nil, address, port, nil, nil, nil, nil, nil + ).find_proxy || false + @proxy_uri || nil + end + + # Returns the address of the proxy server, if defined, +nil+ otherwise; + # see {Proxy Server}[rdoc-ref:Net::HTTP@Proxy+Server]. + def proxy_address + if @proxy_from_env then + proxy_uri&.hostname + else + @proxy_address + end + end + + # Returns the port number of the proxy server, if defined, +nil+ otherwise; + # see {Proxy Server}[rdoc-ref:Net::HTTP@Proxy+Server]. + def proxy_port + if @proxy_from_env then + proxy_uri&.port + else + @proxy_port + end + end + + # Returns the user name of the proxy server, if defined, +nil+ otherwise; + # see {Proxy Server}[rdoc-ref:Net::HTTP@Proxy+Server]. + def proxy_user + if @proxy_from_env + user = proxy_uri&.user + unescape(user) if user + else + @proxy_user + end + end + + # Returns the password of the proxy server, if defined, +nil+ otherwise; + # see {Proxy Server}[rdoc-ref:Net::HTTP@Proxy+Server]. + def proxy_pass + if @proxy_from_env + pass = proxy_uri&.password + unescape(pass) if pass + else + @proxy_pass + end + end + + alias proxyaddr proxy_address #:nodoc: obsolete + alias proxyport proxy_port #:nodoc: obsolete + + private + + def unescape(value) + require 'cgi/util' + CGI.unescape(value) + end + + # without proxy, obsolete + + def conn_address # :nodoc: + @ipaddr || address() + end + + def conn_port # :nodoc: + port() + end + + def edit_path(path) + if proxy? + if path.start_with?("ftp://") || use_ssl? + path + else + "http://#{addr_port}#{path}" + end + else + path + end + end + + # + # HTTP operations + # + + public + + # :call-seq: + # get(path, initheader = nil) {|res| ... } + # + # Sends a GET request to the server; + # returns an instance of a subclass of Net::HTTPResponse. + # + # The request is based on the Net::HTTP::Get object + # created from string +path+ and initial headers hash +initheader+. + # + # With a block given, calls the block with the response body: + # + # http = Net::HTTP.new(hostname) + # http.get('/todos/1') do |res| + # p res + # end # => # + # + # Output: + # + # "{\n \"userId\": 1,\n \"id\": 1,\n \"title\": \"delectus aut autem\",\n \"completed\": false\n}" + # + # With no block given, simply returns the response object: + # + # http.get('/') # => # + # + # Related: + # + # - Net::HTTP::Get: request class for \HTTP method GET. + # - Net::HTTP.get: sends GET request, returns response body. + # + def get(path, initheader = nil, dest = nil, &block) # :yield: +body_segment+ + res = nil + + request(Get.new(path, initheader)) {|r| + r.read_body dest, &block + res = r + } + res + end + + # Sends a HEAD request to the server; + # returns an instance of a subclass of Net::HTTPResponse. + # + # The request is based on the Net::HTTP::Head object + # created from string +path+ and initial headers hash +initheader+: + # + # res = http.head('/todos/1') # => # + # res.body # => nil + # res.to_hash.take(3) + # # => + # [["date", ["Wed, 15 Feb 2023 15:25:42 GMT"]], + # ["content-type", ["application/json; charset=utf-8"]], + # ["connection", ["close"]]] + # + def head(path, initheader = nil) + request(Head.new(path, initheader)) + end + + # :call-seq: + # post(path, data, initheader = nil) {|res| ... } + # + # Sends a POST request to the server; + # returns an instance of a subclass of Net::HTTPResponse. 
+ # + # The request is based on the Net::HTTP::Post object + # created from string +path+, string +data+, and initial headers hash +initheader+. + # + # With a block given, calls the block with the response body: + # + # data = '{"userId": 1, "id": 1, "title": "delectus aut autem", "completed": false}' + # http = Net::HTTP.new(hostname) + # http.post('/todos', data) do |res| + # p res + # end # => # + # + # Output: + # + # "{\n \"{\\\"userId\\\": 1, \\\"id\\\": 1, \\\"title\\\": \\\"delectus aut autem\\\", \\\"completed\\\": false}\": \"\",\n \"id\": 201\n}" + # + # With no block given, simply returns the response object: + # + # http.post('/todos', data) # => # + # + # Related: + # + # - Net::HTTP::Post: request class for \HTTP method POST. + # - Net::HTTP.post: sends POST request, returns response body. + # + def post(path, data, initheader = nil, dest = nil, &block) # :yield: +body_segment+ + send_entity(path, data, initheader, dest, Post, &block) + end + + # :call-seq: + # patch(path, data, initheader = nil) {|res| ... } + # + # Sends a PATCH request to the server; + # returns an instance of a subclass of Net::HTTPResponse. + # + # The request is based on the Net::HTTP::Patch object + # created from string +path+, string +data+, and initial headers hash +initheader+. + # + # With a block given, calls the block with the response body: + # + # data = '{"userId": 1, "id": 1, "title": "delectus aut autem", "completed": false}' + # http = Net::HTTP.new(hostname) + # http.patch('/todos/1', data) do |res| + # p res + # end # => # + # + # Output: + # + # "{\n \"userId\": 1,\n \"id\": 1,\n \"title\": \"delectus aut autem\",\n \"completed\": false,\n \"{\\\"userId\\\": 1, \\\"id\\\": 1, \\\"title\\\": \\\"delectus aut autem\\\", \\\"completed\\\": false}\": \"\"\n}" + # + # With no block given, simply returns the response object: + # + # http.patch('/todos/1', data) # => # + # + def patch(path, data, initheader = nil, dest = nil, &block) # :yield: +body_segment+ + send_entity(path, data, initheader, dest, Patch, &block) + end + + # Sends a PUT request to the server; + # returns an instance of a subclass of Net::HTTPResponse. + # + # The request is based on the Net::HTTP::Put object + # created from string +path+, string +data+, and initial headers hash +initheader+. + # + # data = '{"userId": 1, "id": 1, "title": "delectus aut autem", "completed": false}' + # http = Net::HTTP.new(hostname) + # http.put('/todos/1', data) # => # + # + # Related: + # + # - Net::HTTP::Put: request class for \HTTP method PUT. + # - Net::HTTP.put: sends PUT request, returns response body. + # + def put(path, data, initheader = nil) + request(Put.new(path, initheader), data) + end + + # Sends a PROPPATCH request to the server; + # returns an instance of a subclass of Net::HTTPResponse. + # + # The request is based on the Net::HTTP::Proppatch object + # created from string +path+, string +body+, and initial headers hash +initheader+. + # + # data = '{"userId": 1, "id": 1, "title": "delectus aut autem", "completed": false}' + # http = Net::HTTP.new(hostname) + # http.proppatch('/todos/1', data) + # + def proppatch(path, body, initheader = nil) + request(Proppatch.new(path, initheader), body) + end + + # Sends a LOCK request to the server; + # returns an instance of a subclass of Net::HTTPResponse. + # + # The request is based on the Net::HTTP::Lock object + # created from string +path+, string +body+, and initial headers hash +initheader+. 
+ # + # data = '{"userId": 1, "id": 1, "title": "delectus aut autem", "completed": false}' + # http = Net::HTTP.new(hostname) + # http.lock('/todos/1', data) + # + def lock(path, body, initheader = nil) + request(Lock.new(path, initheader), body) + end + + # Sends an UNLOCK request to the server; + # returns an instance of a subclass of Net::HTTPResponse. + # + # The request is based on the Net::HTTP::Unlock object + # created from string +path+, string +body+, and initial headers hash +initheader+. + # + # data = '{"userId": 1, "id": 1, "title": "delectus aut autem", "completed": false}' + # http = Net::HTTP.new(hostname) + # http.unlock('/todos/1', data) + # + def unlock(path, body, initheader = nil) + request(Unlock.new(path, initheader), body) + end + + # Sends an Options request to the server; + # returns an instance of a subclass of Net::HTTPResponse. + # + # The request is based on the Net::HTTP::Options object + # created from string +path+ and initial headers hash +initheader+. + # + # http = Net::HTTP.new(hostname) + # http.options('/') + # + def options(path, initheader = nil) + request(Options.new(path, initheader)) + end + + # Sends a PROPFIND request to the server; + # returns an instance of a subclass of Net::HTTPResponse. + # + # The request is based on the Net::HTTP::Propfind object + # created from string +path+, string +body+, and initial headers hash +initheader+. + # + # data = '{"userId": 1, "id": 1, "title": "delectus aut autem", "completed": false}' + # http = Net::HTTP.new(hostname) + # http.propfind('/todos/1', data) + # + def propfind(path, body = nil, initheader = {'Depth' => '0'}) + request(Propfind.new(path, initheader), body) + end + + # Sends a DELETE request to the server; + # returns an instance of a subclass of Net::HTTPResponse. + # + # The request is based on the Net::HTTP::Delete object + # created from string +path+ and initial headers hash +initheader+. + # + # http = Net::HTTP.new(hostname) + # http.delete('/todos/1') + # + def delete(path, initheader = {'Depth' => 'Infinity'}) + request(Delete.new(path, initheader)) + end + + # Sends a MOVE request to the server; + # returns an instance of a subclass of Net::HTTPResponse. + # + # The request is based on the Net::HTTP::Move object + # created from string +path+ and initial headers hash +initheader+. + # + # http = Net::HTTP.new(hostname) + # http.move('/todos/1') + # + def move(path, initheader = nil) + request(Move.new(path, initheader)) + end + + # Sends a COPY request to the server; + # returns an instance of a subclass of Net::HTTPResponse. + # + # The request is based on the Net::HTTP::Copy object + # created from string +path+ and initial headers hash +initheader+. + # + # http = Net::HTTP.new(hostname) + # http.copy('/todos/1') + # + def copy(path, initheader = nil) + request(Copy.new(path, initheader)) + end + + # Sends a MKCOL request to the server; + # returns an instance of a subclass of Net::HTTPResponse. + # + # The request is based on the Net::HTTP::Mkcol object + # created from string +path+, string +body+, and initial headers hash +initheader+. + # + # data = '{"userId": 1, "id": 1, "title": "delectus aut autem", "completed": false}' + # http.mkcol('/todos/1', data) + # http = Net::HTTP.new(hostname) + # + def mkcol(path, body = nil, initheader = nil) + request(Mkcol.new(path, initheader), body) + end + + # Sends a TRACE request to the server; + # returns an instance of a subclass of Net::HTTPResponse. 
+ # + # The request is based on the Net::HTTP::Trace object + # created from string +path+ and initial headers hash +initheader+. + # + # http = Net::HTTP.new(hostname) + # http.trace('/todos/1') + # + def trace(path, initheader = nil) + request(Trace.new(path, initheader)) + end + + # Sends a GET request to the server; + # forms the response into a Net::HTTPResponse object. + # + # The request is based on the Net::HTTP::Get object + # created from string +path+ and initial headers hash +initheader+. + # + # With no block given, returns the response object: + # + # http = Net::HTTP.new(hostname) + # http.request_get('/todos') # => # + # + # With a block given, calls the block with the response object + # and returns the response object: + # + # http.request_get('/todos') do |res| + # p res + # end # => # + # + # Output: + # + # # + # + def request_get(path, initheader = nil, &block) # :yield: +response+ + request(Get.new(path, initheader), &block) + end + + # Sends a HEAD request to the server; + # returns an instance of a subclass of Net::HTTPResponse. + # + # The request is based on the Net::HTTP::Head object + # created from string +path+ and initial headers hash +initheader+. + # + # http = Net::HTTP.new(hostname) + # http.head('/todos/1') # => # + # + def request_head(path, initheader = nil, &block) + request(Head.new(path, initheader), &block) + end + + # Sends a POST request to the server; + # forms the response into a Net::HTTPResponse object. + # + # The request is based on the Net::HTTP::Post object + # created from string +path+, string +data+, and initial headers hash +initheader+. + # + # With no block given, returns the response object: + # + # http = Net::HTTP.new(hostname) + # http.post('/todos', 'xyzzy') + # # => # + # + # With a block given, calls the block with the response body + # and returns the response object: + # + # http.post('/todos', 'xyzzy') do |res| + # p res + # end # => # + # + # Output: + # + # "{\n \"xyzzy\": \"\",\n \"id\": 201\n}" + # + def request_post(path, data, initheader = nil, &block) # :yield: +response+ + request Post.new(path, initheader), data, &block + end + + # Sends a PUT request to the server; + # returns an instance of a subclass of Net::HTTPResponse. + # + # The request is based on the Net::HTTP::Put object + # created from string +path+, string +data+, and initial headers hash +initheader+. + # + # http = Net::HTTP.new(hostname) + # http.put('/todos/1', 'xyzzy') + # # => # + # + def request_put(path, data, initheader = nil, &block) #:nodoc: + request Put.new(path, initheader), data, &block + end + + alias get2 request_get #:nodoc: obsolete + alias head2 request_head #:nodoc: obsolete + alias post2 request_post #:nodoc: obsolete + alias put2 request_put #:nodoc: obsolete + + # Sends an \HTTP request to the server; + # returns an instance of a subclass of Net::HTTPResponse. + # + # The request is based on the Net::HTTPRequest object + # created from string +path+, string +data+, and initial headers hash +header+. + # That object is an instance of the + # {subclass of Net::HTTPRequest}[rdoc-ref:Net::HTTPRequest@Request+Subclasses], + # that corresponds to the given uppercase string +name+, + # which must be + # an {HTTP request method}[https://en.wikipedia.org/wiki/HTTP#Request_methods] + # or a {WebDAV request method}[https://en.wikipedia.org/wiki/WebDAV#Implementation]. 
+ # + # Examples: + # + # http = Net::HTTP.new(hostname) + # http.send_request('GET', '/todos/1') + # # => # + # http.send_request('POST', '/todos', 'xyzzy') + # # => # + # + def send_request(name, path, data = nil, header = nil) + has_response_body = name != 'HEAD' + r = HTTPGenericRequest.new(name,(data ? true : false),has_response_body,path,header) + request r, data + end + + # Sends the given request +req+ to the server; + # forms the response into a Net::HTTPResponse object. + # + # The given +req+ must be an instance of a + # {subclass of Net::HTTPRequest}[rdoc-ref:Net::HTTPRequest@Request+Subclasses]. + # Argument +body+ should be given only if needed for the request. + # + # With no block given, returns the response object: + # + # http = Net::HTTP.new(hostname) + # + # req = Net::HTTP::Get.new('/todos/1') + # http.request(req) + # # => # + # + # req = Net::HTTP::Post.new('/todos') + # http.request(req, 'xyzzy') + # # => # + # + # With a block given, calls the block with the response and returns the response: + # + # req = Net::HTTP::Get.new('/todos/1') + # http.request(req) do |res| + # p res + # end # => # + # + # Output: + # + # # + # + def request(req, body = nil, &block) # :yield: +response+ + unless started? + start { + req['connection'] ||= 'close' + return request(req, body, &block) + } + end + if proxy_user() + req.proxy_basic_auth proxy_user(), proxy_pass() unless use_ssl? + end + req.set_body_internal body + res = transport_request(req, &block) + if sspi_auth?(res) + sspi_auth(req) + res = transport_request(req, &block) + end + res + end + + private + + # Executes a request which uses a representation + # and returns its body. + def send_entity(path, data, initheader, dest, type, &block) + res = nil + request(type.new(path, initheader), data) {|r| + r.read_body dest, &block + res = r + } + res + end + + IDEMPOTENT_METHODS_ = %w/GET HEAD PUT DELETE OPTIONS TRACE/ # :nodoc: + + def transport_request(req) + count = 0 + begin + begin_transport req + res = catch(:response) { + begin + req.exec @socket, @curr_http_version, edit_path(req.path) + rescue Errno::EPIPE + # Failure when writing full request, but we can probably + # still read the received response. + end + + begin + res = HTTPResponse.read_new(@socket) + res.decode_content = req.decode_content + res.body_encoding = @response_body_encoding + res.ignore_eof = @ignore_eof + end while res.kind_of?(HTTPInformation) + + res.uri = req.uri + + res + } + res.reading_body(@socket, req.response_body_permitted?) { + if block_given? + count = max_retries # Don't restart in the middle of a download + yield res + end + } + rescue Net::OpenTimeout + raise + rescue Net::ReadTimeout, IOError, EOFError, + Errno::ECONNRESET, Errno::ECONNABORTED, Errno::EPIPE, Errno::ETIMEDOUT, + # avoid a dependency on OpenSSL + defined?(OpenSSL::SSL) ? OpenSSL::SSL::SSLError : IOError, + Timeout::Error => exception + if count < max_retries && IDEMPOTENT_METHODS_.include?(req.method) + count += 1 + @socket.close if @socket + debug "Conn close because of error #{exception}, and retry" + retry + end + debug "Conn close because of error #{exception}" + @socket.close if @socket + raise + end + + end_transport req, res + res + rescue => exception + debug "Conn close because of error #{exception}" + @socket.close if @socket + raise exception + end + + def begin_transport(req) + if @socket.closed? 
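+        # The socket was closed (by #finish or a prior error); reconnect.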
+ connect + elsif @last_communicated + if @last_communicated + @keep_alive_timeout < Process.clock_gettime(Process::CLOCK_MONOTONIC) + debug 'Conn close because of keep_alive_timeout' + @socket.close + connect + elsif @socket.io.to_io.wait_readable(0) && @socket.eof? + debug "Conn close because of EOF" + @socket.close + connect + end + end + + if not req.response_body_permitted? and @close_on_empty_response + req['connection'] ||= 'close' + end + + req.update_uri address, port, use_ssl? + req['host'] ||= addr_port() + end + + def end_transport(req, res) + @curr_http_version = res.http_version + @last_communicated = nil + if @socket.closed? + debug 'Conn socket closed' + elsif not res.body and @close_on_empty_response + debug 'Conn close' + @socket.close + elsif keep_alive?(req, res) + debug 'Conn keep-alive' + @last_communicated = Process.clock_gettime(Process::CLOCK_MONOTONIC) + else + debug 'Conn close' + @socket.close + end + end + + def keep_alive?(req, res) + return false if req.connection_close? + if @curr_http_version <= '1.0' + res.connection_keep_alive? + else # HTTP/1.1 or later + not res.connection_close? + end + end + + def sspi_auth?(res) + return false unless @sspi_enabled + if res.kind_of?(HTTPProxyAuthenticationRequired) and + proxy? and res["Proxy-Authenticate"].include?("Negotiate") + begin + require 'win32/sspi' + true + rescue LoadError + false + end + else + false + end + end + + def sspi_auth(req) + n = Win32::SSPI::NegotiateAuth.new + req["Proxy-Authorization"] = "Negotiate #{n.get_initial_token}" + # Some versions of ISA will close the connection if this isn't present. + req["Connection"] = "Keep-Alive" + req["Proxy-Connection"] = "Keep-Alive" + res = transport_request(req) + authphrase = res["Proxy-Authenticate"] or return res + req["Proxy-Authorization"] = "Negotiate #{n.complete_authentication(authphrase)}" + rescue => err + raise HTTPAuthenticationError.new('HTTP authentication failed', err) + end + + # + # utils + # + + private + + def addr_port + addr = address + addr = "[#{addr}]" if addr.include?(":") + default_port = use_ssl? ? HTTP.https_default_port : HTTP.http_default_port + default_port == port ? addr : "#{addr}:#{port}" + end + + # Adds a message to debugging output + def debug(msg) + return unless @debug_output + @debug_output << msg + @debug_output << "\n" + end + + alias_method :D, :debug + end + + # for backward compatibility until Ruby 3.5 + # https://bugs.ruby-lang.org/issues/20900 + # https://github.com/bblimke/webmock/pull/1081 + HTTPSession = HTTP + deprecate_constant :HTTPSession +end + +require_relative 'http/exceptions' + +require_relative 'http/header' + +require_relative 'http/generic_request' +require_relative 'http/request' +require_relative 'http/requests' + +require_relative 'http/response' +require_relative 'http/responses' + +require_relative 'http/proxy_delta' diff --git a/vendor/bundle/ruby/3.2.0/gems/net-http-0.6.0/lib/net/http/exceptions.rb b/vendor/bundle/ruby/3.2.0/gems/net-http-0.6.0/lib/net/http/exceptions.rb new file mode 100644 index 000000000..ceec8f7b0 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/net-http-0.6.0/lib/net/http/exceptions.rb @@ -0,0 +1,34 @@ +# frozen_string_literal: true +module Net + # Net::HTTP exception class. + # You cannot use Net::HTTPExceptions directly; instead, you must use + # its subclasses. 
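+  # Each subclass keeps the offending Net::HTTPResponse available via
+  # #response.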
+ module HTTPExceptions + def initialize(msg, res) #:nodoc: + super msg + @response = res + end + attr_reader :response + alias data response #:nodoc: obsolete + end + + class HTTPError < ProtocolError + include HTTPExceptions + end + + class HTTPRetriableError < ProtoRetriableError + include HTTPExceptions + end + + class HTTPClientException < ProtoServerError + include HTTPExceptions + end + + class HTTPFatalError < ProtoFatalError + include HTTPExceptions + end + + # We cannot use the name "HTTPServerError", it is the name of the response. + HTTPServerException = HTTPClientException # :nodoc: + deprecate_constant(:HTTPServerException) +end diff --git a/vendor/bundle/ruby/3.2.0/gems/net-http-0.6.0/lib/net/http/generic_request.rb b/vendor/bundle/ruby/3.2.0/gems/net-http-0.6.0/lib/net/http/generic_request.rb new file mode 100644 index 000000000..44e329a0c --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/net-http-0.6.0/lib/net/http/generic_request.rb @@ -0,0 +1,414 @@ +# frozen_string_literal: true +# +# \HTTPGenericRequest is the parent of the Net::HTTPRequest class. +# +# Do not use this directly; instead, use a subclass of Net::HTTPRequest. +# +# == About the Examples +# +# :include: doc/net-http/examples.rdoc +# +class Net::HTTPGenericRequest + + include Net::HTTPHeader + + def initialize(m, reqbody, resbody, uri_or_path, initheader = nil) # :nodoc: + @method = m + @request_has_body = reqbody + @response_has_body = resbody + + if URI === uri_or_path then + raise ArgumentError, "not an HTTP URI" unless URI::HTTP === uri_or_path + hostname = uri_or_path.hostname + raise ArgumentError, "no host component for URI" unless (hostname && hostname.length > 0) + @uri = uri_or_path.dup + host = @uri.hostname.dup + host << ":" << @uri.port.to_s if @uri.port != @uri.default_port + @path = uri_or_path.request_uri + raise ArgumentError, "no HTTP request path given" unless @path + else + @uri = nil + host = nil + raise ArgumentError, "no HTTP request path given" unless uri_or_path + raise ArgumentError, "HTTP request path is empty" if uri_or_path.empty? + @path = uri_or_path.dup + end + + @decode_content = false + + if Net::HTTP::HAVE_ZLIB then + if !initheader || + !initheader.keys.any? { |k| + %w[accept-encoding range].include? k.downcase + } then + @decode_content = true if @response_has_body + initheader = initheader ? 
initheader.dup : {} + initheader["accept-encoding"] = + "gzip;q=1.0,deflate;q=0.6,identity;q=0.3" + end + end + + initialize_http_header initheader + self['Accept'] ||= '*/*' + self['User-Agent'] ||= 'Ruby' + self['Host'] ||= host if host + @body = nil + @body_stream = nil + @body_data = nil + end + + # Returns the string method name for the request: + # + # Net::HTTP::Get.new(uri).method # => "GET" + # Net::HTTP::Post.new(uri).method # => "POST" + # + attr_reader :method + + # Returns the string path for the request: + # + # Net::HTTP::Get.new(uri).path # => "/" + # Net::HTTP::Post.new('example.com').path # => "example.com" + # + attr_reader :path + + # Returns the URI object for the request, or +nil+ if none: + # + # Net::HTTP::Get.new(uri).uri + # # => # + # Net::HTTP::Get.new('example.com').uri # => nil + # + attr_reader :uri + + # Returns +false+ if the request's header 'Accept-Encoding' + # has been set manually or deleted + # (indicating that the user intends to handle encoding in the response), + # +true+ otherwise: + # + # req = Net::HTTP::Get.new(uri) # => # + # req['Accept-Encoding'] # => "gzip;q=1.0,deflate;q=0.6,identity;q=0.3" + # req.decode_content # => true + # req['Accept-Encoding'] = 'foo' + # req.decode_content # => false + # req.delete('Accept-Encoding') + # req.decode_content # => false + # + attr_reader :decode_content + + # Returns a string representation of the request: + # + # Net::HTTP::Post.new(uri).inspect # => "#" + # + def inspect + "\#<#{self.class} #{@method}>" + end + + ## + # Don't automatically decode response content-encoding if the user indicates + # they want to handle it. + + def []=(key, val) # :nodoc: + @decode_content = false if key.downcase == 'accept-encoding' + + super key, val + end + + # Returns whether the request may have a body: + # + # Net::HTTP::Post.new(uri).request_body_permitted? # => true + # Net::HTTP::Get.new(uri).request_body_permitted? # => false + # + def request_body_permitted? + @request_has_body + end + + # Returns whether the response may have a body: + # + # Net::HTTP::Post.new(uri).response_body_permitted? # => true + # Net::HTTP::Head.new(uri).response_body_permitted? # => false + # + def response_body_permitted? + @response_has_body + end + + def body_exist? # :nodoc: + warn "Net::HTTPRequest#body_exist? is obsolete; use response_body_permitted?", uplevel: 1 if $VERBOSE + response_body_permitted? 
+ end + + # Returns the string body for the request, or +nil+ if there is none: + # + # req = Net::HTTP::Post.new(uri) + # req.body # => nil + # req.body = '{"title": "foo","body": "bar","userId": 1}' + # req.body # => "{\"title\": \"foo\",\"body\": \"bar\",\"userId\": 1}" + # + attr_reader :body + + # Sets the body for the request: + # + # req = Net::HTTP::Post.new(uri) + # req.body # => nil + # req.body = '{"title": "foo","body": "bar","userId": 1}' + # req.body # => "{\"title\": \"foo\",\"body\": \"bar\",\"userId\": 1}" + # + def body=(str) + @body = str + @body_stream = nil + @body_data = nil + str + end + + # Returns the body stream object for the request, or +nil+ if there is none: + # + # req = Net::HTTP::Post.new(uri) # => # + # req.body_stream # => nil + # require 'stringio' + # req.body_stream = StringIO.new('xyzzy') # => # + # req.body_stream # => # + # + attr_reader :body_stream + + # Sets the body stream for the request: + # + # req = Net::HTTP::Post.new(uri) # => # + # req.body_stream # => nil + # require 'stringio' + # req.body_stream = StringIO.new('xyzzy') # => # + # req.body_stream # => # + # + def body_stream=(input) + @body = nil + @body_stream = input + @body_data = nil + input + end + + def set_body_internal(str) #:nodoc: internal use only + raise ArgumentError, "both of body argument and HTTPRequest#body set" if str and (@body or @body_stream) + self.body = str if str + if @body.nil? && @body_stream.nil? && @body_data.nil? && request_body_permitted? + self.body = '' + end + end + + # + # write + # + + def exec(sock, ver, path) #:nodoc: internal use only + if @body + send_request_with_body sock, ver, path, @body + elsif @body_stream + send_request_with_body_stream sock, ver, path, @body_stream + elsif @body_data + send_request_with_body_data sock, ver, path, @body_data + else + write_header sock, ver, path + end + end + + def update_uri(addr, port, ssl) # :nodoc: internal use only + # reflect the connection and @path to @uri + return unless @uri + + if ssl + scheme = 'https' + klass = URI::HTTPS + else + scheme = 'http' + klass = URI::HTTP + end + + if host = self['host'] + host.sub!(/:.*/m, '') + elsif host = @uri.host + else + host = addr + end + # convert the class of the URI + if @uri.is_a?(klass) + @uri.host = host + @uri.port = port + else + @uri = klass.new( + scheme, @uri.userinfo, + host, port, nil, + @uri.path, nil, @uri.query, nil) + end + end + + private + + class Chunker #:nodoc: + def initialize(sock) + @sock = sock + @prev = nil + end + + def write(buf) + # avoid memcpy() of buf, buf can huge and eat memory bandwidth + rv = buf.bytesize + @sock.write("#{rv.to_s(16)}\r\n", buf, "\r\n") + rv + end + + def finish + @sock.write("0\r\n\r\n") + end + end + + def send_request_with_body(sock, ver, path, body) + self.content_length = body.bytesize + delete 'Transfer-Encoding' + supply_default_content_type + write_header sock, ver, path + wait_for_continue sock, ver if sock.continue_timeout + sock.write body + end + + def send_request_with_body_stream(sock, ver, path, f) + unless content_length() or chunked? + raise ArgumentError, + "Content-Length not given and Transfer-Encoding is not `chunked'" + end + supply_default_content_type + write_header sock, ver, path + wait_for_continue sock, ver if sock.continue_timeout + if chunked? 
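+      # Chunked transfer: wrap the socket so each write becomes a
+      # hex-length-prefixed chunk; Chunker#finish emits the terminating
+      # "0\r\n\r\n".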
+ chunker = Chunker.new(sock) + IO.copy_stream(f, chunker) + chunker.finish + else + IO.copy_stream(f, sock) + end + end + + def send_request_with_body_data(sock, ver, path, params) + if /\Amultipart\/form-data\z/i !~ self.content_type + self.content_type = 'application/x-www-form-urlencoded' + return send_request_with_body(sock, ver, path, URI.encode_www_form(params)) + end + + opt = @form_option.dup + require 'securerandom' unless defined?(SecureRandom) + opt[:boundary] ||= SecureRandom.urlsafe_base64(40) + self.set_content_type(self.content_type, boundary: opt[:boundary]) + if chunked? + write_header sock, ver, path + encode_multipart_form_data(sock, params, opt) + else + require 'tempfile' + file = Tempfile.new('multipart') + file.binmode + encode_multipart_form_data(file, params, opt) + file.rewind + self.content_length = file.size + write_header sock, ver, path + IO.copy_stream(file, sock) + file.close(true) + end + end + + def encode_multipart_form_data(out, params, opt) + charset = opt[:charset] + boundary = opt[:boundary] + require 'securerandom' unless defined?(SecureRandom) + boundary ||= SecureRandom.urlsafe_base64(40) + chunked_p = chunked? + + buf = +'' + params.each do |key, value, h={}| + key = quote_string(key, charset) + filename = + h.key?(:filename) ? h[:filename] : + value.respond_to?(:to_path) ? File.basename(value.to_path) : + nil + + buf << "--#{boundary}\r\n" + if filename + filename = quote_string(filename, charset) + type = h[:content_type] || 'application/octet-stream' + buf << "Content-Disposition: form-data; " \ + "name=\"#{key}\"; filename=\"#{filename}\"\r\n" \ + "Content-Type: #{type}\r\n\r\n" + if !out.respond_to?(:write) || !value.respond_to?(:read) + # if +out+ is not an IO or +value+ is not an IO + buf << (value.respond_to?(:read) ? value.read : value) + elsif value.respond_to?(:size) && chunked_p + # if +out+ is an IO and +value+ is a File, use IO.copy_stream + flush_buffer(out, buf, chunked_p) + out << "%x\r\n" % value.size if chunked_p + IO.copy_stream(value, out) + out << "\r\n" if chunked_p + else + # +out+ is an IO, and +value+ is not a File but an IO + flush_buffer(out, buf, chunked_p) + 1 while flush_buffer(out, value.read(4096), chunked_p) + end + else + # non-file field: + # HTML5 says, "The parts of the generated multipart/form-data + # resource that correspond to non-file fields must not have a + # Content-Type header specified." + buf << "Content-Disposition: form-data; name=\"#{key}\"\r\n\r\n" + buf << (value.respond_to?(:read) ? value.read : value) + end + buf << "\r\n" + end + buf << "--#{boundary}--\r\n" + flush_buffer(out, buf, chunked_p) + out << "0\r\n\r\n" if chunked_p + end + + def quote_string(str, charset) + str = str.encode(charset, fallback:->(c){'&#%d;'%c.encode("UTF-8").ord}) if charset + str.gsub(/[\\"]/, '\\\\\&') + end + + def flush_buffer(out, buf, chunked_p) + return unless buf + out << "%x\r\n"%buf.bytesize if chunked_p + out << buf + out << "\r\n" if chunked_p + buf.clear + end + + def supply_default_content_type + return if content_type() + warn 'net/http: Content-Type did not set; using application/x-www-form-urlencoded', uplevel: 1 if $VERBOSE + set_content_type 'application/x-www-form-urlencoded' + end + + ## + # Waits up to the continue timeout for a response from the server provided + # we're speaking HTTP 1.1 and are expecting a 100-continue response. 
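+  # If anything other than 100 Continue arrives before the timeout expires,
+  # the response is thrown to :response and the request body is not sent.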
+ + def wait_for_continue(sock, ver) + if ver >= '1.1' and @header['expect'] and + @header['expect'].include?('100-continue') + if sock.io.to_io.wait_readable(sock.continue_timeout) + res = Net::HTTPResponse.read_new(sock) + unless res.kind_of?(Net::HTTPContinue) + res.decode_content = @decode_content + throw :response, res + end + end + end + end + + def write_header(sock, ver, path) + reqline = "#{@method} #{path} HTTP/#{ver}" + if /[\r\n]/ =~ reqline + raise ArgumentError, "A Request-Line must not contain CR or LF" + end + buf = +'' + buf << reqline << "\r\n" + each_capitalized do |k,v| + buf << "#{k}: #{v}\r\n" + end + buf << "\r\n" + sock.write buf + end + +end + diff --git a/vendor/bundle/ruby/3.2.0/gems/net-http-0.6.0/lib/net/http/header.rb b/vendor/bundle/ruby/3.2.0/gems/net-http-0.6.0/lib/net/http/header.rb new file mode 100644 index 000000000..30c632cce --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/net-http-0.6.0/lib/net/http/header.rb @@ -0,0 +1,981 @@ +# frozen_string_literal: true +# +# The \HTTPHeader module provides access to \HTTP headers. +# +# The module is included in: +# +# - Net::HTTPGenericRequest (and therefore Net::HTTPRequest). +# - Net::HTTPResponse. +# +# The headers are a hash-like collection of key/value pairs called _fields_. +# +# == Request and Response Fields +# +# Headers may be included in: +# +# - A Net::HTTPRequest object: +# the object's headers will be sent with the request. +# Any fields may be defined in the request; +# see {Setters}[rdoc-ref:Net::HTTPHeader@Setters]. +# - A Net::HTTPResponse object: +# the objects headers are usually those returned from the host. +# Fields may be retrieved from the object; +# see {Getters}[rdoc-ref:Net::HTTPHeader@Getters] +# and {Iterators}[rdoc-ref:Net::HTTPHeader@Iterators]. +# +# Exactly which fields should be sent or expected depends on the host; +# see: +# +# - {Request fields}[https://en.wikipedia.org/wiki/List_of_HTTP_header_fields#Request_fields]. +# - {Response fields}[https://en.wikipedia.org/wiki/List_of_HTTP_header_fields#Response_fields]. +# +# == About the Examples +# +# :include: doc/net-http/examples.rdoc +# +# == Fields +# +# A header field is a key/value pair. +# +# === Field Keys +# +# A field key may be: +# +# - A string: Key 'Accept' is treated as if it were +# 'Accept'.downcase; i.e., 'accept'. +# - A symbol: Key :Accept is treated as if it were +# :Accept.to_s.downcase; i.e., 'accept'. +# +# Examples: +# +# req = Net::HTTP::Get.new(uri) +# req[:accept] # => "*/*" +# req['Accept'] # => "*/*" +# req['ACCEPT'] # => "*/*" +# +# req['accept'] = 'text/html' +# req[:accept] = 'text/html' +# req['ACCEPT'] = 'text/html' +# +# === Field Values +# +# A field value may be returned as an array of strings or as a string: +# +# - These methods return field values as arrays: +# +# - #get_fields: Returns the array value for the given key, +# or +nil+ if it does not exist. +# - #to_hash: Returns a hash of all header fields: +# each key is a field name; its value is the array value for the field. +# +# - These methods return field values as string; +# the string value for a field is equivalent to +# self[key.downcase.to_s].join(', ')): +# +# - #[]: Returns the string value for the given key, +# or +nil+ if it does not exist. +# - #fetch: Like #[], but accepts a default value +# to be returned if the key does not exist. +# +# The field value may be set: +# +# - #[]=: Sets the value for the given key; +# the given value may be a string, a symbol, an array, or a hash. 
+# - #add_field: Adds a given value to a value for the given key +# (not overwriting the existing value). +# - #delete: Deletes the field for the given key. +# +# Example field values: +# +# - \String: +# +# req['Accept'] = 'text/html' # => "text/html" +# req['Accept'] # => "text/html" +# req.get_fields('Accept') # => ["text/html"] +# +# - \Symbol: +# +# req['Accept'] = :text # => :text +# req['Accept'] # => "text" +# req.get_fields('Accept') # => ["text"] +# +# - Simple array: +# +# req[:foo] = %w[bar baz bat] +# req[:foo] # => "bar, baz, bat" +# req.get_fields(:foo) # => ["bar", "baz", "bat"] +# +# - Simple hash: +# +# req[:foo] = {bar: 0, baz: 1, bat: 2} +# req[:foo] # => "bar, 0, baz, 1, bat, 2" +# req.get_fields(:foo) # => ["bar", "0", "baz", "1", "bat", "2"] +# +# - Nested: +# +# req[:foo] = [%w[bar baz], {bat: 0, bam: 1}] +# req[:foo] # => "bar, baz, bat, 0, bam, 1" +# req.get_fields(:foo) # => ["bar", "baz", "bat", "0", "bam", "1"] +# +# req[:foo] = {bar: %w[baz bat], bam: {bah: 0, bad: 1}} +# req[:foo] # => "bar, baz, bat, bam, bah, 0, bad, 1" +# req.get_fields(:foo) # => ["bar", "baz", "bat", "bam", "bah", "0", "bad", "1"] +# +# == Convenience Methods +# +# Various convenience methods retrieve values, set values, query values, +# set form values, or iterate over fields. +# +# === Setters +# +# \Method #[]= can set any field, but does little to validate the new value; +# some of the other setter methods provide some validation: +# +# - #[]=: Sets the string or array value for the given key. +# - #add_field: Creates or adds to the array value for the given key. +# - #basic_auth: Sets the string authorization header for 'Authorization'. +# - #content_length=: Sets the integer length for field 'Content-Length. +# - #content_type=: Sets the string value for field 'Content-Type'. +# - #proxy_basic_auth: Sets the string authorization header for 'Proxy-Authorization'. +# - #set_range: Sets the value for field 'Range'. +# +# === Form Setters +# +# - #set_form: Sets an HTML form data set. +# - #set_form_data: Sets header fields and a body from HTML form data. +# +# === Getters +# +# \Method #[] can retrieve the value of any field that exists, +# but always as a string; +# some of the other getter methods return something different +# from the simple string value: +# +# - #[]: Returns the string field value for the given key. +# - #content_length: Returns the integer value of field 'Content-Length'. +# - #content_range: Returns the Range value of field 'Content-Range'. +# - #content_type: Returns the string value of field 'Content-Type'. +# - #fetch: Returns the string field value for the given key. +# - #get_fields: Returns the array field value for the given +key+. +# - #main_type: Returns first part of the string value of field 'Content-Type'. +# - #sub_type: Returns second part of the string value of field 'Content-Type'. +# - #range: Returns an array of Range objects of field 'Range', or +nil+. +# - #range_length: Returns the integer length of the range given in field 'Content-Range'. +# - #type_params: Returns the string parameters for 'Content-Type'. +# +# === Queries +# +# - #chunked?: Returns whether field 'Transfer-Encoding' is set to 'chunked'. +# - #connection_close?: Returns whether field 'Connection' is set to 'close'. +# - #connection_keep_alive?: Returns whether field 'Connection' is set to 'keep-alive'. +# - #key?: Returns whether a given key exists. +# +# === Iterators +# +# - #each_capitalized: Passes each field capitalized-name/value pair to the block. 
+# - #each_capitalized_name: Passes each capitalized field name to the block. +# - #each_header: Passes each field name/value pair to the block. +# - #each_name: Passes each field name to the block. +# - #each_value: Passes each string field value to the block. +# +module Net::HTTPHeader + MAX_KEY_LENGTH = 1024 + MAX_FIELD_LENGTH = 65536 + + def initialize_http_header(initheader) #:nodoc: + @header = {} + return unless initheader + initheader.each do |key, value| + warn "net/http: duplicated HTTP header: #{key}", uplevel: 3 if key?(key) and $VERBOSE + if value.nil? + warn "net/http: nil HTTP header: #{key}", uplevel: 3 if $VERBOSE + else + value = value.strip # raise error for invalid byte sequences + if key.to_s.bytesize > MAX_KEY_LENGTH + raise ArgumentError, "too long (#{key.bytesize} bytes) header: #{key[0, 30].inspect}..." + end + if value.to_s.bytesize > MAX_FIELD_LENGTH + raise ArgumentError, "header #{key} has too long field value: #{value.bytesize}" + end + if value.count("\r\n") > 0 + raise ArgumentError, "header #{key} has field value #{value.inspect}, this cannot include CR/LF" + end + @header[key.downcase.to_s] = [value] + end + end + end + + def size #:nodoc: obsolete + @header.size + end + + alias length size #:nodoc: obsolete + + # Returns the string field value for the case-insensitive field +key+, + # or +nil+ if there is no such key; + # see {Fields}[rdoc-ref:Net::HTTPHeader@Fields]: + # + # res = Net::HTTP.get_response(hostname, '/todos/1') + # res['Connection'] # => "keep-alive" + # res['Nosuch'] # => nil + # + # Note that some field values may be retrieved via convenience methods; + # see {Getters}[rdoc-ref:Net::HTTPHeader@Getters]. + def [](key) + a = @header[key.downcase.to_s] or return nil + a.join(', ') + end + + # Sets the value for the case-insensitive +key+ to +val+, + # overwriting the previous value if the field exists; + # see {Fields}[rdoc-ref:Net::HTTPHeader@Fields]: + # + # req = Net::HTTP::Get.new(uri) + # req['Accept'] # => "*/*" + # req['Accept'] = 'text/html' + # req['Accept'] # => "text/html" + # + # Note that some field values may be set via convenience methods; + # see {Setters}[rdoc-ref:Net::HTTPHeader@Setters]. + def []=(key, val) + unless val + @header.delete key.downcase.to_s + return val + end + set_field(key, val) + end + + # Adds value +val+ to the value array for field +key+ if the field exists; + # creates the field with the given +key+ and +val+ if it does not exist. 
+ # see {Fields}[rdoc-ref:Net::HTTPHeader@Fields]: + # + # req = Net::HTTP::Get.new(uri) + # req.add_field('Foo', 'bar') + # req['Foo'] # => "bar" + # req.add_field('Foo', 'baz') + # req['Foo'] # => "bar, baz" + # req.add_field('Foo', %w[baz bam]) + # req['Foo'] # => "bar, baz, baz, bam" + # req.get_fields('Foo') # => ["bar", "baz", "baz", "bam"] + # + def add_field(key, val) + stringified_downcased_key = key.downcase.to_s + if @header.key?(stringified_downcased_key) + append_field_value(@header[stringified_downcased_key], val) + else + set_field(key, val) + end + end + + private def set_field(key, val) + case val + when Enumerable + ary = [] + append_field_value(ary, val) + @header[key.downcase.to_s] = ary + else + val = val.to_s # for compatibility use to_s instead of to_str + if val.b.count("\r\n") > 0 + raise ArgumentError, 'header field value cannot include CR/LF' + end + @header[key.downcase.to_s] = [val] + end + end + + private def append_field_value(ary, val) + case val + when Enumerable + val.each{|x| append_field_value(ary, x)} + else + val = val.to_s + if /[\r\n]/n.match?(val.b) + raise ArgumentError, 'header field value cannot include CR/LF' + end + ary.push val + end + end + + # Returns the array field value for the given +key+, + # or +nil+ if there is no such field; + # see {Fields}[rdoc-ref:Net::HTTPHeader@Fields]: + # + # res = Net::HTTP.get_response(hostname, '/todos/1') + # res.get_fields('Connection') # => ["keep-alive"] + # res.get_fields('Nosuch') # => nil + # + def get_fields(key) + stringified_downcased_key = key.downcase.to_s + return nil unless @header[stringified_downcased_key] + @header[stringified_downcased_key].dup + end + + # call-seq: + # fetch(key, default_val = nil) {|key| ... } -> object + # fetch(key, default_val = nil) -> value or default_val + # + # With a block, returns the string value for +key+ if it exists; + # otherwise returns the value of the block; + # ignores the +default_val+; + # see {Fields}[rdoc-ref:Net::HTTPHeader@Fields]: + # + # res = Net::HTTP.get_response(hostname, '/todos/1') + # + # # Field exists; block not called. + # res.fetch('Connection') do |value| + # fail 'Cannot happen' + # end # => "keep-alive" + # + # # Field does not exist; block called. + # res.fetch('Nosuch') do |value| + # value.downcase + # end # => "nosuch" + # + # With no block, returns the string value for +key+ if it exists; + # otherwise, returns +default_val+ if it was given; + # otherwise raises an exception: + # + # res.fetch('Connection', 'Foo') # => "keep-alive" + # res.fetch('Nosuch', 'Foo') # => "Foo" + # res.fetch('Nosuch') # Raises KeyError. + # + def fetch(key, *args, &block) #:yield: +key+ + a = @header.fetch(key.downcase.to_s, *args, &block) + a.kind_of?(Array) ? a.join(', ') : a + end + + # Calls the block with each key/value pair: + # + # res = Net::HTTP.get_response(hostname, '/todos/1') + # res.each_header do |key, value| + # p [key, value] if key.start_with?('c') + # end + # + # Output: + # + # ["content-type", "application/json; charset=utf-8"] + # ["connection", "keep-alive"] + # ["cache-control", "max-age=43200"] + # ["cf-cache-status", "HIT"] + # ["cf-ray", "771d17e9bc542cf5-ORD"] + # + # Returns an enumerator if no block is given. + # + # Net::HTTPHeader#each is an alias for Net::HTTPHeader#each_header. + def each_header #:yield: +key+, +value+ + block_given? 
or return enum_for(__method__) { @header.size } + @header.each do |k,va| + yield k, va.join(', ') + end + end + + alias each each_header + + # Calls the block with each field key: + # + # res = Net::HTTP.get_response(hostname, '/todos/1') + # res.each_key do |key| + # p key if key.start_with?('c') + # end + # + # Output: + # + # "content-type" + # "connection" + # "cache-control" + # "cf-cache-status" + # "cf-ray" + # + # Returns an enumerator if no block is given. + # + # Net::HTTPHeader#each_name is an alias for Net::HTTPHeader#each_key. + def each_name(&block) #:yield: +key+ + block_given? or return enum_for(__method__) { @header.size } + @header.each_key(&block) + end + + alias each_key each_name + + # Calls the block with each capitalized field name: + # + # res = Net::HTTP.get_response(hostname, '/todos/1') + # res.each_capitalized_name do |key| + # p key if key.start_with?('C') + # end + # + # Output: + # + # "Content-Type" + # "Connection" + # "Cache-Control" + # "Cf-Cache-Status" + # "Cf-Ray" + # + # The capitalization is system-dependent; + # see {Case Mapping}[https://docs.ruby-lang.org/en/master/case_mapping_rdoc.html]. + # + # Returns an enumerator if no block is given. + def each_capitalized_name #:yield: +key+ + block_given? or return enum_for(__method__) { @header.size } + @header.each_key do |k| + yield capitalize(k) + end + end + + # Calls the block with each string field value: + # + # res = Net::HTTP.get_response(hostname, '/todos/1') + # res.each_value do |value| + # p value if value.start_with?('c') + # end + # + # Output: + # + # "chunked" + # "cf-q-config;dur=6.0000002122251e-06" + # "cloudflare" + # + # Returns an enumerator if no block is given. + def each_value #:yield: +value+ + block_given? or return enum_for(__method__) { @header.size } + @header.each_value do |va| + yield va.join(', ') + end + end + + # Removes the header for the given case-insensitive +key+ + # (see {Fields}[rdoc-ref:Net::HTTPHeader@Fields]); + # returns the deleted value, or +nil+ if no such field exists: + # + # req = Net::HTTP::Get.new(uri) + # req.delete('Accept') # => ["*/*"] + # req.delete('Nosuch') # => nil + # + def delete(key) + @header.delete(key.downcase.to_s) + end + + # Returns +true+ if the field for the case-insensitive +key+ exists, +false+ otherwise: + # + # req = Net::HTTP::Get.new(uri) + # req.key?('Accept') # => true + # req.key?('Nosuch') # => false + # + def key?(key) + @header.key?(key.downcase.to_s) + end + + # Returns a hash of the key/value pairs: + # + # req = Net::HTTP::Get.new(uri) + # req.to_hash + # # => + # {"accept-encoding"=>["gzip;q=1.0,deflate;q=0.6,identity;q=0.3"], + # "accept"=>["*/*"], + # "user-agent"=>["Ruby"], + # "host"=>["jsonplaceholder.typicode.com"]} + # + def to_hash + @header.dup + end + + # Like #each_header, but the keys are returned in capitalized form. + # + # Net::HTTPHeader#canonical_each is an alias for Net::HTTPHeader#each_capitalized. + def each_capitalized + block_given? 
or return enum_for(__method__) { @header.size } + @header.each do |k,v| + yield capitalize(k), v.join(', ') + end + end + + alias canonical_each each_capitalized + + def capitalize(name) + name.to_s.split('-'.freeze).map {|s| s.capitalize }.join('-'.freeze) + end + private :capitalize + + # Returns an array of Range objects that represent + # the value of field 'Range', + # or +nil+ if there is no such field; + # see {Range request header}[https://en.wikipedia.org/wiki/List_of_HTTP_header_fields#range-request-header]: + # + # req = Net::HTTP::Get.new(uri) + # req['Range'] = 'bytes=0-99,200-299,400-499' + # req.range # => [0..99, 200..299, 400..499] + # req.delete('Range') + # req.range # # => nil + # + def range + return nil unless @header['range'] + + value = self['Range'] + # byte-range-set = *( "," OWS ) ( byte-range-spec / suffix-byte-range-spec ) + # *( OWS "," [ OWS ( byte-range-spec / suffix-byte-range-spec ) ] ) + # corrected collected ABNF + # http://tools.ietf.org/html/draft-ietf-httpbis-p5-range-19#section-5.4.1 + # http://tools.ietf.org/html/draft-ietf-httpbis-p5-range-19#appendix-C + # http://tools.ietf.org/html/draft-ietf-httpbis-p1-messaging-19#section-3.2.5 + unless /\Abytes=((?:,[ \t]*)*(?:\d+-\d*|-\d+)(?:[ \t]*,(?:[ \t]*\d+-\d*|-\d+)?)*)\z/ =~ value + raise Net::HTTPHeaderSyntaxError, "invalid syntax for byte-ranges-specifier: '#{value}'" + end + + byte_range_set = $1 + result = byte_range_set.split(/,/).map {|spec| + m = /(\d+)?\s*-\s*(\d+)?/i.match(spec) or + raise Net::HTTPHeaderSyntaxError, "invalid byte-range-spec: '#{spec}'" + d1 = m[1].to_i + d2 = m[2].to_i + if m[1] and m[2] + if d1 > d2 + raise Net::HTTPHeaderSyntaxError, "last-byte-pos MUST greater than or equal to first-byte-pos but '#{spec}'" + end + d1..d2 + elsif m[1] + d1..-1 + elsif m[2] + -d2..-1 + else + raise Net::HTTPHeaderSyntaxError, 'range is not specified' + end + } + # if result.empty? + # byte-range-set must include at least one byte-range-spec or suffix-byte-range-spec + # but above regexp already denies it. + if result.size == 1 && result[0].begin == 0 && result[0].end == -1 + raise Net::HTTPHeaderSyntaxError, 'only one suffix-byte-range-spec with zero suffix-length' + end + result + end + + # call-seq: + # set_range(length) -> length + # set_range(offset, length) -> range + # set_range(begin..length) -> range + # + # Sets the value for field 'Range'; + # see {Range request header}[https://en.wikipedia.org/wiki/List_of_HTTP_header_fields#range-request-header]: + # + # With argument +length+: + # + # req = Net::HTTP::Get.new(uri) + # req.set_range(100) # => 100 + # req['Range'] # => "bytes=0-99" + # + # With arguments +offset+ and +length+: + # + # req.set_range(100, 100) # => 100...200 + # req['Range'] # => "bytes=100-199" + # + # With argument +range+: + # + # req.set_range(100..199) # => 100..199 + # req['Range'] # => "bytes=100-199" + # + # Net::HTTPHeader#range= is an alias for Net::HTTPHeader#set_range. + def set_range(r, e = nil) + unless r + @header.delete 'range' + return r + end + r = (r...r+e) if e + case r + when Numeric + n = r.to_i + rangestr = (n > 0 ? "0-#{n-1}" : "-#{-n}") + when Range + first = r.first + last = r.end + last -= 1 if r.exclude_end? + if last == -1 + rangestr = (first > 0 ? 
"#{first}-" : "-#{-first}") + else + raise Net::HTTPHeaderSyntaxError, 'range.first is negative' if first < 0 + raise Net::HTTPHeaderSyntaxError, 'range.last is negative' if last < 0 + raise Net::HTTPHeaderSyntaxError, 'must be .first < .last' if first > last + rangestr = "#{first}-#{last}" + end + else + raise TypeError, 'Range/Integer is required' + end + @header['range'] = ["bytes=#{rangestr}"] + r + end + + alias range= set_range + + # Returns the value of field 'Content-Length' as an integer, + # or +nil+ if there is no such field; + # see {Content-Length request header}[https://en.wikipedia.org/wiki/List_of_HTTP_header_fields#content-length-request-header]: + # + # res = Net::HTTP.get_response(hostname, '/nosuch/1') + # res.content_length # => 2 + # res = Net::HTTP.get_response(hostname, '/todos/1') + # res.content_length # => nil + # + def content_length + return nil unless key?('Content-Length') + len = self['Content-Length'].slice(/\d+/) or + raise Net::HTTPHeaderSyntaxError, 'wrong Content-Length format' + len.to_i + end + + # Sets the value of field 'Content-Length' to the given numeric; + # see {Content-Length response header}[https://en.wikipedia.org/wiki/List_of_HTTP_header_fields#content-length-response-header]: + # + # _uri = uri.dup + # hostname = _uri.hostname # => "jsonplaceholder.typicode.com" + # _uri.path = '/posts' # => "/posts" + # req = Net::HTTP::Post.new(_uri) # => # + # req.body = '{"title": "foo","body": "bar","userId": 1}' + # req.content_length = req.body.size # => 42 + # req.content_type = 'application/json' + # res = Net::HTTP.start(hostname) do |http| + # http.request(req) + # end # => # + # + def content_length=(len) + unless len + @header.delete 'content-length' + return nil + end + @header['content-length'] = [len.to_i.to_s] + end + + # Returns +true+ if field 'Transfer-Encoding' + # exists and has value 'chunked', + # +false+ otherwise; + # see {Transfer-Encoding response header}[https://en.wikipedia.org/wiki/List_of_HTTP_header_fields#transfer-encoding-response-header]: + # + # res = Net::HTTP.get_response(hostname, '/todos/1') + # res['Transfer-Encoding'] # => "chunked" + # res.chunked? # => true + # + def chunked? + return false unless @header['transfer-encoding'] + field = self['Transfer-Encoding'] + (/(?:\A|[^\-\w])chunked(?![\-\w])/i =~ field) ? true : false + end + + # Returns a Range object representing the value of field + # 'Content-Range', or +nil+ if no such field exists; + # see {Content-Range response header}[https://en.wikipedia.org/wiki/List_of_HTTP_header_fields#content-range-response-header]: + # + # res = Net::HTTP.get_response(hostname, '/todos/1') + # res['Content-Range'] # => nil + # res['Content-Range'] = 'bytes 0-499/1000' + # res['Content-Range'] # => "bytes 0-499/1000" + # res.content_range # => 0..499 + # + def content_range + return nil unless @header['content-range'] + m = %r<\A\s*(\w+)\s+(\d+)-(\d+)/(\d+|\*)>.match(self['Content-Range']) or + raise Net::HTTPHeaderSyntaxError, 'wrong Content-Range format' + return unless m[1] == 'bytes' + m[2].to_i .. 
m[3].to_i + end + + # Returns the integer representing length of the value of field + # 'Content-Range', or +nil+ if no such field exists; + # see {Content-Range response header}[https://en.wikipedia.org/wiki/List_of_HTTP_header_fields#content-range-response-header]: + # + # res = Net::HTTP.get_response(hostname, '/todos/1') + # res['Content-Range'] # => nil + # res['Content-Range'] = 'bytes 0-499/1000' + # res.range_length # => 500 + # + def range_length + r = content_range() or return nil + r.end - r.begin + 1 + end + + # Returns the {media type}[https://en.wikipedia.org/wiki/Media_type] + # from the value of field 'Content-Type', + # or +nil+ if no such field exists; + # see {Content-Type response header}[https://en.wikipedia.org/wiki/List_of_HTTP_header_fields#content-type-response-header]: + # + # res = Net::HTTP.get_response(hostname, '/todos/1') + # res['content-type'] # => "application/json; charset=utf-8" + # res.content_type # => "application/json" + # + def content_type + main = main_type() + return nil unless main + + sub = sub_type() + if sub + "#{main}/#{sub}" + else + main + end + end + + # Returns the leading ('type') part of the + # {media type}[https://en.wikipedia.org/wiki/Media_type] + # from the value of field 'Content-Type', + # or +nil+ if no such field exists; + # see {Content-Type response header}[https://en.wikipedia.org/wiki/List_of_HTTP_header_fields#content-type-response-header]: + # + # res = Net::HTTP.get_response(hostname, '/todos/1') + # res['content-type'] # => "application/json; charset=utf-8" + # res.main_type # => "application" + # + def main_type + return nil unless @header['content-type'] + self['Content-Type'].split(';').first.to_s.split('/')[0].to_s.strip + end + + # Returns the trailing ('subtype') part of the + # {media type}[https://en.wikipedia.org/wiki/Media_type] + # from the value of field 'Content-Type', + # or +nil+ if no such field exists; + # see {Content-Type response header}[https://en.wikipedia.org/wiki/List_of_HTTP_header_fields#content-type-response-header]: + # + # res = Net::HTTP.get_response(hostname, '/todos/1') + # res['content-type'] # => "application/json; charset=utf-8" + # res.sub_type # => "json" + # + def sub_type + return nil unless @header['content-type'] + _, sub = *self['Content-Type'].split(';').first.to_s.split('/') + return nil unless sub + sub.strip + end + + # Returns the trailing ('parameters') part of the value of field 'Content-Type', + # or +nil+ if no such field exists; + # see {Content-Type response header}[https://en.wikipedia.org/wiki/List_of_HTTP_header_fields#content-type-response-header]: + # + # res = Net::HTTP.get_response(hostname, '/todos/1') + # res['content-type'] # => "application/json; charset=utf-8" + # res.type_params # => {"charset"=>"utf-8"} + # + def type_params + result = {} + list = self['Content-Type'].to_s.split(';') + list.shift + list.each do |param| + k, v = *param.split('=', 2) + result[k.strip] = v.strip + end + result + end + + # Sets the value of field 'Content-Type'; + # returns the new value; + # see {Content-Type request header}[https://en.wikipedia.org/wiki/List_of_HTTP_header_fields#content-type-request-header]: + # + # req = Net::HTTP::Get.new(uri) + # req.set_content_type('application/json') # => ["application/json"] + # + # Net::HTTPHeader#content_type= is an alias for Net::HTTPHeader#set_content_type. 
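+  #
+  # Hedged usage sketch (editor's addition, not part of the original docs);
+  # the optional +params+ hash is appended as media-type parameters, and
+  # +req+ is assumed to be any Net::HTTPRequest:
+  #
+  #   req.set_content_type('text/html', 'charset' => 'utf-8')
+  #   req['Content-Type'] # => "text/html; charset=utf-8"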
+ def set_content_type(type, params = {}) + @header['content-type'] = [type + params.map{|k,v|"; #{k}=#{v}"}.join('')] + end + + alias content_type= set_content_type + + # Sets the request body to a URL-encoded string derived from argument +params+, + # and sets request header field 'Content-Type' + # to 'application/x-www-form-urlencoded'. + # + # The resulting request is suitable for HTTP request +POST+ or +PUT+. + # + # Argument +params+ must be suitable for use as argument +enum+ to + # {URI.encode_www_form}[https://docs.ruby-lang.org/en/master/URI.html#method-c-encode_www_form]. + # + # With only argument +params+ given, + # sets the body to a URL-encoded string with the default separator '&': + # + # req = Net::HTTP::Post.new('example.com') + # + # req.set_form_data(q: 'ruby', lang: 'en') + # req.body # => "q=ruby&lang=en" + # req['Content-Type'] # => "application/x-www-form-urlencoded" + # + # req.set_form_data([['q', 'ruby'], ['lang', 'en']]) + # req.body # => "q=ruby&lang=en" + # + # req.set_form_data(q: ['ruby', 'perl'], lang: 'en') + # req.body # => "q=ruby&q=perl&lang=en" + # + # req.set_form_data([['q', 'ruby'], ['q', 'perl'], ['lang', 'en']]) + # req.body # => "q=ruby&q=perl&lang=en" + # + # With string argument +sep+ also given, + # uses that string as the separator: + # + # req.set_form_data({q: 'ruby', lang: 'en'}, '|') + # req.body # => "q=ruby|lang=en" + # + # Net::HTTPHeader#form_data= is an alias for Net::HTTPHeader#set_form_data. + def set_form_data(params, sep = '&') + query = URI.encode_www_form(params) + query.gsub!(/&/, sep) if sep != '&' + self.body = query + self.content_type = 'application/x-www-form-urlencoded' + end + + alias form_data= set_form_data + + # Stores form data to be used in a +POST+ or +PUT+ request. + # + # The form data given in +params+ consists of zero or more fields; + # each field is: + # + # - A scalar value. + # - A name/value pair. + # - An IO stream opened for reading. + # + # Argument +params+ should be an + # {Enumerable}[https://docs.ruby-lang.org/en/master/Enumerable.html#module-Enumerable-label-Enumerable+in+Ruby+Classes] + # (method params.map will be called), + # and is often an array or hash. + # + # First, we set up a request: + # + # _uri = uri.dup + # _uri.path ='/posts' + # req = Net::HTTP::Post.new(_uri) + # + # Argument +params+ As an Array + # + # When +params+ is an array, + # each of its elements is a subarray that defines a field; + # the subarray may contain: + # + # - One string: + # + # req.set_form([['foo'], ['bar'], ['baz']]) + # + # - Two strings: + # + # req.set_form([%w[foo 0], %w[bar 1], %w[baz 2]]) + # + # - When argument +enctype+ (see below) is given as + # 'multipart/form-data': + # + # - A string name and an IO stream opened for reading: + # + # require 'stringio' + # req.set_form([['file', StringIO.new('Ruby is cool.')]]) + # + # - A string name, an IO stream opened for reading, + # and an options hash, which may contain these entries: + # + # - +:filename+: The name of the file to use. + # - +:content_type+: The content type of the uploaded file. + # + # Example: + # + # req.set_form([['file', file, {filename: "other-filename.foo"}]] + # + # The various forms may be mixed: + # + # req.set_form(['foo', %w[bar 1], ['file', file]]) + # + # Argument +params+ As a Hash + # + # When +params+ is a hash, + # each of its entries is a name/value pair that defines a field: + # + # - The name is a string. + # - The value may be: + # + # - +nil+. + # - Another string. 
+ # - An IO stream opened for reading + # (only when argument +enctype+ -- see below -- is given as + # 'multipart/form-data'). + # + # Examples: + # + # # Nil-valued fields. + # req.set_form({'foo' => nil, 'bar' => nil, 'baz' => nil}) + # + # # String-valued fields. + # req.set_form({'foo' => 0, 'bar' => 1, 'baz' => 2}) + # + # # IO-valued field. + # require 'stringio' + # req.set_form({'file' => StringIO.new('Ruby is cool.')}) + # + # # Mixture of fields. + # req.set_form({'foo' => nil, 'bar' => 1, 'file' => file}) + # + # Optional argument +enctype+ specifies the value to be given + # to field 'Content-Type', and must be one of: + # + # - 'application/x-www-form-urlencoded' (the default). + # - 'multipart/form-data'; + # see {RFC 7578}[https://www.rfc-editor.org/rfc/rfc7578]. + # + # Optional argument +formopt+ is a hash of options + # (applicable only when argument +enctype+ + # is 'multipart/form-data') + # that may include the following entries: + # + # - +:boundary+: The value is the boundary string for the multipart message. + # If not given, the boundary is a random string. + # See {Boundary}[https://www.rfc-editor.org/rfc/rfc7578#section-4.1]. + # - +:charset+: Value is the character set for the form submission. + # Field names and values of non-file fields should be encoded with this charset. + # + def set_form(params, enctype='application/x-www-form-urlencoded', formopt={}) + @body_data = params + @body = nil + @body_stream = nil + @form_option = formopt + case enctype + when /\Aapplication\/x-www-form-urlencoded\z/i, + /\Amultipart\/form-data\z/i + self.content_type = enctype + else + raise ArgumentError, "invalid enctype: #{enctype}" + end + end + + # Sets header 'Authorization' using the given + # +account+ and +password+ strings: + # + # req.basic_auth('my_account', 'my_password') + # req['Authorization'] + # # => "Basic bXlfYWNjb3VudDpteV9wYXNzd29yZA==" + # + def basic_auth(account, password) + @header['authorization'] = [basic_encode(account, password)] + end + + # Sets header 'Proxy-Authorization' using the given + # +account+ and +password+ strings: + # + # req.proxy_basic_auth('my_account', 'my_password') + # req['Proxy-Authorization'] + # # => "Basic bXlfYWNjb3VudDpteV9wYXNzd29yZA==" + # + def proxy_basic_auth(account, password) + @header['proxy-authorization'] = [basic_encode(account, password)] + end + + def basic_encode(account, password) + 'Basic ' + ["#{account}:#{password}"].pack('m0') + end + private :basic_encode + +# Returns whether the HTTP session is to be closed. + def connection_close? + token = /(?:\A|,)\s*close\s*(?:\z|,)/i + @header['connection']&.grep(token) {return true} + @header['proxy-connection']&.grep(token) {return true} + false + end + +# Returns whether the HTTP session is to be kept alive. + def connection_keep_alive? + token = /(?:\A|,)\s*keep-alive\s*(?:\z|,)/i + @header['connection']&.grep(token) {return true} + @header['proxy-connection']&.grep(token) {return true} + false + end + +end diff --git a/vendor/bundle/ruby/3.2.0/gems/net-http-0.6.0/lib/net/http/proxy_delta.rb b/vendor/bundle/ruby/3.2.0/gems/net-http-0.6.0/lib/net/http/proxy_delta.rb new file mode 100644 index 000000000..e7d30def6 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/net-http-0.6.0/lib/net/http/proxy_delta.rb @@ -0,0 +1,17 @@ +# frozen_string_literal: true +module Net::HTTP::ProxyDelta #:nodoc: internal use only + private + + def conn_address + proxy_address() + end + + def conn_port + proxy_port() + end + + def edit_path(path) + use_ssl? ? 
path : "http://#{addr_port()}#{path}" + end +end + diff --git a/vendor/bundle/ruby/3.2.0/gems/net-http-0.6.0/lib/net/http/request.rb b/vendor/bundle/ruby/3.2.0/gems/net-http-0.6.0/lib/net/http/request.rb new file mode 100644 index 000000000..4a138572e --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/net-http-0.6.0/lib/net/http/request.rb @@ -0,0 +1,88 @@ +# frozen_string_literal: true + +# This class is the base class for \Net::HTTP request classes. +# The class should not be used directly; +# instead you should use its subclasses, listed below. +# +# == Creating a Request +# +# An request object may be created with either a URI or a string hostname: +# +# require 'net/http' +# uri = URI('https://jsonplaceholder.typicode.com/') +# req = Net::HTTP::Get.new(uri) # => # +# req = Net::HTTP::Get.new(uri.hostname) # => # +# +# And with any of the subclasses: +# +# req = Net::HTTP::Head.new(uri) # => # +# req = Net::HTTP::Post.new(uri) # => # +# req = Net::HTTP::Put.new(uri) # => # +# # ... +# +# The new instance is suitable for use as the argument to Net::HTTP#request. +# +# == Request Headers +# +# A new request object has these header fields by default: +# +# req.to_hash +# # => +# {"accept-encoding"=>["gzip;q=1.0,deflate;q=0.6,identity;q=0.3"], +# "accept"=>["*/*"], +# "user-agent"=>["Ruby"], +# "host"=>["jsonplaceholder.typicode.com"]} +# +# See: +# +# - {Request header Accept-Encoding}[https://en.wikipedia.org/wiki/List_of_HTTP_header_fields#Accept-Encoding] +# and {Compression and Decompression}[rdoc-ref:Net::HTTP@Compression+and+Decompression]. +# - {Request header Accept}[https://en.wikipedia.org/wiki/List_of_HTTP_header_fields#accept-request-header]. +# - {Request header User-Agent}[https://en.wikipedia.org/wiki/List_of_HTTP_header_fields#user-agent-request-header]. +# - {Request header Host}[https://en.wikipedia.org/wiki/List_of_HTTP_header_fields#host-request-header]. +# +# You can add headers or override default headers: +# +# # res = Net::HTTP::Get.new(uri, {'foo' => '0', 'bar' => '1'}) +# +# This class (and therefore its subclasses) also includes (indirectly) +# module Net::HTTPHeader, which gives access to its +# {methods for setting headers}[rdoc-ref:Net::HTTPHeader@Setters]. +# +# == Request Subclasses +# +# Subclasses for HTTP requests: +# +# - Net::HTTP::Get +# - Net::HTTP::Head +# - Net::HTTP::Post +# - Net::HTTP::Put +# - Net::HTTP::Delete +# - Net::HTTP::Options +# - Net::HTTP::Trace +# - Net::HTTP::Patch +# +# Subclasses for WebDAV requests: +# +# - Net::HTTP::Propfind +# - Net::HTTP::Proppatch +# - Net::HTTP::Mkcol +# - Net::HTTP::Copy +# - Net::HTTP::Move +# - Net::HTTP::Lock +# - Net::HTTP::Unlock +# +class Net::HTTPRequest < Net::HTTPGenericRequest + # Creates an HTTP request object for +path+. + # + # +initheader+ are the default headers to use. Net::HTTP adds + # Accept-Encoding to enable compression of the response body unless + # Accept-Encoding or Range are supplied in +initheader+. 
+ + def initialize(path, initheader = nil) + super self.class::METHOD, + self.class::REQUEST_HAS_BODY, + self.class::RESPONSE_HAS_BODY, + path, initheader + end +end diff --git a/vendor/bundle/ruby/3.2.0/gems/net-http-0.6.0/lib/net/http/requests.rb b/vendor/bundle/ruby/3.2.0/gems/net-http-0.6.0/lib/net/http/requests.rb new file mode 100644 index 000000000..e58057adf --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/net-http-0.6.0/lib/net/http/requests.rb @@ -0,0 +1,430 @@ +# frozen_string_literal: true + +# HTTP/1.1 methods --- RFC2616 + +# \Class for representing +# {HTTP method GET}[https://en.wikipedia.org/w/index.php?title=Hypertext_Transfer_Protocol#GET_method]: +# +# require 'net/http' +# uri = URI('http://example.com') +# hostname = uri.hostname # => "example.com" +# req = Net::HTTP::Get.new(uri) # => # +# res = Net::HTTP.start(hostname) do |http| +# http.request(req) +# end +# +# See {Request Headers}[rdoc-ref:Net::HTTPRequest@Request+Headers]. +# +# Properties: +# +# - Request body: optional. +# - Response body: yes. +# - {Safe}[https://en.wikipedia.org/wiki/Hypertext_Transfer_Protocol#Safe_methods]: yes. +# - {Idempotent}[https://en.wikipedia.org/wiki/Hypertext_Transfer_Protocol#Idempotent_methods]: yes. +# - {Cacheable}[https://en.wikipedia.org/wiki/Hypertext_Transfer_Protocol#Cacheable_methods]: yes. +# +# Related: +# +# - Net::HTTP.get: sends +GET+ request, returns response body. +# - Net::HTTP#get: sends +GET+ request, returns response object. +# +class Net::HTTP::Get < Net::HTTPRequest + METHOD = 'GET' + REQUEST_HAS_BODY = false + RESPONSE_HAS_BODY = true +end + +# \Class for representing +# {HTTP method HEAD}[https://en.wikipedia.org/w/index.php?title=Hypertext_Transfer_Protocol#HEAD_method]: +# +# require 'net/http' +# uri = URI('http://example.com') +# hostname = uri.hostname # => "example.com" +# req = Net::HTTP::Head.new(uri) # => # +# res = Net::HTTP.start(hostname) do |http| +# http.request(req) +# end +# +# See {Request Headers}[rdoc-ref:Net::HTTPRequest@Request+Headers]. +# +# Properties: +# +# - Request body: optional. +# - Response body: no. +# - {Safe}[https://en.wikipedia.org/wiki/Hypertext_Transfer_Protocol#Safe_methods]: yes. +# - {Idempotent}[https://en.wikipedia.org/wiki/Hypertext_Transfer_Protocol#Idempotent_methods]: yes. +# - {Cacheable}[https://en.wikipedia.org/wiki/Hypertext_Transfer_Protocol#Cacheable_methods]: yes. +# +# Related: +# +# - Net::HTTP#head: sends +HEAD+ request, returns response object. +# +class Net::HTTP::Head < Net::HTTPRequest + METHOD = 'HEAD' + REQUEST_HAS_BODY = false + RESPONSE_HAS_BODY = false +end + +# \Class for representing +# {HTTP method POST}[https://en.wikipedia.org/w/index.php?title=Hypertext_Transfer_Protocol#POST_method]: +# +# require 'net/http' +# uri = URI('http://example.com') +# hostname = uri.hostname # => "example.com" +# uri.path = '/posts' +# req = Net::HTTP::Post.new(uri) # => # +# req.body = '{"title": "foo","body": "bar","userId": 1}' +# req.content_type = 'application/json' +# res = Net::HTTP.start(hostname) do |http| +# http.request(req) +# end +# +# See {Request Headers}[rdoc-ref:Net::HTTPRequest@Request+Headers]. +# +# Properties: +# +# - Request body: yes. +# - Response body: yes. +# - {Safe}[https://en.wikipedia.org/wiki/Hypertext_Transfer_Protocol#Safe_methods]: no. +# - {Idempotent}[https://en.wikipedia.org/wiki/Hypertext_Transfer_Protocol#Idempotent_methods]: no. +# - {Cacheable}[https://en.wikipedia.org/wiki/Hypertext_Transfer_Protocol#Cacheable_methods]: yes. 
+# +# Related: +# +# - Net::HTTP.post: sends +POST+ request, returns response object. +# - Net::HTTP#post: sends +POST+ request, returns response object. +# +class Net::HTTP::Post < Net::HTTPRequest + METHOD = 'POST' + REQUEST_HAS_BODY = true + RESPONSE_HAS_BODY = true +end + +# \Class for representing +# {HTTP method PUT}[https://en.wikipedia.org/w/index.php?title=Hypertext_Transfer_Protocol#PUT_method]: +# +# require 'net/http' +# uri = URI('http://example.com') +# hostname = uri.hostname # => "example.com" +# uri.path = '/posts' +# req = Net::HTTP::Put.new(uri) # => # +# req.body = '{"title": "foo","body": "bar","userId": 1}' +# req.content_type = 'application/json' +# res = Net::HTTP.start(hostname) do |http| +# http.request(req) +# end +# +# See {Request Headers}[rdoc-ref:Net::HTTPRequest@Request+Headers]. +# +# Properties: +# +# - Request body: yes. +# - Response body: yes. +# - {Safe}[https://en.wikipedia.org/wiki/Hypertext_Transfer_Protocol#Safe_methods]: no. +# - {Idempotent}[https://en.wikipedia.org/wiki/Hypertext_Transfer_Protocol#Idempotent_methods]: yes. +# - {Cacheable}[https://en.wikipedia.org/wiki/Hypertext_Transfer_Protocol#Cacheable_methods]: no. +# +# Related: +# +# - Net::HTTP.put: sends +PUT+ request, returns response object. +# - Net::HTTP#put: sends +PUT+ request, returns response object. +# +class Net::HTTP::Put < Net::HTTPRequest + METHOD = 'PUT' + REQUEST_HAS_BODY = true + RESPONSE_HAS_BODY = true +end + +# \Class for representing +# {HTTP method DELETE}[https://en.wikipedia.org/w/index.php?title=Hypertext_Transfer_Protocol#DELETE_method]: +# +# require 'net/http' +# uri = URI('http://example.com') +# hostname = uri.hostname # => "example.com" +# uri.path = '/posts/1' +# req = Net::HTTP::Delete.new(uri) # => # +# res = Net::HTTP.start(hostname) do |http| +# http.request(req) +# end +# +# See {Request Headers}[rdoc-ref:Net::HTTPRequest@Request+Headers]. +# +# Properties: +# +# - Request body: optional. +# - Response body: yes. +# - {Safe}[https://en.wikipedia.org/wiki/Hypertext_Transfer_Protocol#Safe_methods]: no. +# - {Idempotent}[https://en.wikipedia.org/wiki/Hypertext_Transfer_Protocol#Idempotent_methods]: yes. +# - {Cacheable}[https://en.wikipedia.org/wiki/Hypertext_Transfer_Protocol#Cacheable_methods]: no. +# +# Related: +# +# - Net::HTTP#delete: sends +DELETE+ request, returns response object. +# +class Net::HTTP::Delete < Net::HTTPRequest + METHOD = 'DELETE' + REQUEST_HAS_BODY = false + RESPONSE_HAS_BODY = true +end + +# \Class for representing +# {HTTP method OPTIONS}[https://en.wikipedia.org/w/index.php?title=Hypertext_Transfer_Protocol#OPTIONS_method]: +# +# require 'net/http' +# uri = URI('http://example.com') +# hostname = uri.hostname # => "example.com" +# req = Net::HTTP::Options.new(uri) # => # +# res = Net::HTTP.start(hostname) do |http| +# http.request(req) +# end +# +# See {Request Headers}[rdoc-ref:Net::HTTPRequest@Request+Headers]. +# +# Properties: +# +# - Request body: optional. +# - Response body: yes. +# - {Safe}[https://en.wikipedia.org/wiki/Hypertext_Transfer_Protocol#Safe_methods]: yes. +# - {Idempotent}[https://en.wikipedia.org/wiki/Hypertext_Transfer_Protocol#Idempotent_methods]: yes. +# - {Cacheable}[https://en.wikipedia.org/wiki/Hypertext_Transfer_Protocol#Cacheable_methods]: no. +# +# Related: +# +# - Net::HTTP#options: sends +OPTIONS+ request, returns response object. 
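+#
+# Rough usage sketch (editor's addition): the 'Allow' field, when the server
+# returns one, lists the methods permitted for the target resource:
+#
+#   res = Net::HTTP.start(hostname) {|http| http.options('/') }
+#   res['Allow'] # e.g. "OPTIONS, GET, HEAD" (server-dependent; may be nil)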
+# +class Net::HTTP::Options < Net::HTTPRequest + METHOD = 'OPTIONS' + REQUEST_HAS_BODY = false + RESPONSE_HAS_BODY = true +end + +# \Class for representing +# {HTTP method TRACE}[https://en.wikipedia.org/w/index.php?title=Hypertext_Transfer_Protocol#TRACE_method]: +# +# require 'net/http' +# uri = URI('http://example.com') +# hostname = uri.hostname # => "example.com" +# req = Net::HTTP::Trace.new(uri) # => # +# res = Net::HTTP.start(hostname) do |http| +# http.request(req) +# end +# +# See {Request Headers}[rdoc-ref:Net::HTTPRequest@Request+Headers]. +# +# Properties: +# +# - Request body: no. +# - Response body: yes. +# - {Safe}[https://en.wikipedia.org/wiki/Hypertext_Transfer_Protocol#Safe_methods]: yes. +# - {Idempotent}[https://en.wikipedia.org/wiki/Hypertext_Transfer_Protocol#Idempotent_methods]: yes. +# - {Cacheable}[https://en.wikipedia.org/wiki/Hypertext_Transfer_Protocol#Cacheable_methods]: no. +# +# Related: +# +# - Net::HTTP#trace: sends +TRACE+ request, returns response object. +# +class Net::HTTP::Trace < Net::HTTPRequest + METHOD = 'TRACE' + REQUEST_HAS_BODY = false + RESPONSE_HAS_BODY = true +end + +# \Class for representing +# {HTTP method PATCH}[https://en.wikipedia.org/w/index.php?title=Hypertext_Transfer_Protocol#PATCH_method]: +# +# require 'net/http' +# uri = URI('http://example.com') +# hostname = uri.hostname # => "example.com" +# uri.path = '/posts' +# req = Net::HTTP::Patch.new(uri) # => # +# req.body = '{"title": "foo","body": "bar","userId": 1}' +# req.content_type = 'application/json' +# res = Net::HTTP.start(hostname) do |http| +# http.request(req) +# end +# +# See {Request Headers}[rdoc-ref:Net::HTTPRequest@Request+Headers]. +# +# Properties: +# +# - Request body: yes. +# - Response body: yes. +# - {Safe}[https://en.wikipedia.org/wiki/Hypertext_Transfer_Protocol#Safe_methods]: no. +# - {Idempotent}[https://en.wikipedia.org/wiki/Hypertext_Transfer_Protocol#Idempotent_methods]: no. +# - {Cacheable}[https://en.wikipedia.org/wiki/Hypertext_Transfer_Protocol#Cacheable_methods]: no. +# +# Related: +# +# - Net::HTTP#patch: sends +PATCH+ request, returns response object. +# +class Net::HTTP::Patch < Net::HTTPRequest + METHOD = 'PATCH' + REQUEST_HAS_BODY = true + RESPONSE_HAS_BODY = true +end + +# +# WebDAV methods --- RFC2518 +# + +# \Class for representing +# {WebDAV method PROPFIND}[http://www.webdav.org/specs/rfc4918.html#METHOD_PROPFIND]: +# +# require 'net/http' +# uri = URI('http://example.com') +# hostname = uri.hostname # => "example.com" +# req = Net::HTTP::Propfind.new(uri) # => # +# res = Net::HTTP.start(hostname) do |http| +# http.request(req) +# end +# +# See {Request Headers}[rdoc-ref:Net::HTTPRequest@Request+Headers]. +# +# Related: +# +# - Net::HTTP#propfind: sends +PROPFIND+ request, returns response object. +# +class Net::HTTP::Propfind < Net::HTTPRequest + METHOD = 'PROPFIND' + REQUEST_HAS_BODY = true + RESPONSE_HAS_BODY = true +end + +# \Class for representing +# {WebDAV method PROPPATCH}[http://www.webdav.org/specs/rfc4918.html#METHOD_PROPPATCH]: +# +# require 'net/http' +# uri = URI('http://example.com') +# hostname = uri.hostname # => "example.com" +# req = Net::HTTP::Proppatch.new(uri) # => # +# res = Net::HTTP.start(hostname) do |http| +# http.request(req) +# end +# +# See {Request Headers}[rdoc-ref:Net::HTTPRequest@Request+Headers]. +# +# Related: +# +# - Net::HTTP#proppatch: sends +PROPPATCH+ request, returns response object. 
+# +class Net::HTTP::Proppatch < Net::HTTPRequest + METHOD = 'PROPPATCH' + REQUEST_HAS_BODY = true + RESPONSE_HAS_BODY = true +end + +# \Class for representing +# {WebDAV method MKCOL}[http://www.webdav.org/specs/rfc4918.html#METHOD_MKCOL]: +# +# require 'net/http' +# uri = URI('http://example.com') +# hostname = uri.hostname # => "example.com" +# req = Net::HTTP::Mkcol.new(uri) # => # +# res = Net::HTTP.start(hostname) do |http| +# http.request(req) +# end +# +# See {Request Headers}[rdoc-ref:Net::HTTPRequest@Request+Headers]. +# +# Related: +# +# - Net::HTTP#mkcol: sends +MKCOL+ request, returns response object. +# +class Net::HTTP::Mkcol < Net::HTTPRequest + METHOD = 'MKCOL' + REQUEST_HAS_BODY = true + RESPONSE_HAS_BODY = true +end + +# \Class for representing +# {WebDAV method COPY}[http://www.webdav.org/specs/rfc4918.html#METHOD_COPY]: +# +# require 'net/http' +# uri = URI('http://example.com') +# hostname = uri.hostname # => "example.com" +# req = Net::HTTP::Copy.new(uri) # => # +# res = Net::HTTP.start(hostname) do |http| +# http.request(req) +# end +# +# See {Request Headers}[rdoc-ref:Net::HTTPRequest@Request+Headers]. +# +# Related: +# +# - Net::HTTP#copy: sends +COPY+ request, returns response object. +# +class Net::HTTP::Copy < Net::HTTPRequest + METHOD = 'COPY' + REQUEST_HAS_BODY = false + RESPONSE_HAS_BODY = true +end + +# \Class for representing +# {WebDAV method MOVE}[http://www.webdav.org/specs/rfc4918.html#METHOD_MOVE]: +# +# require 'net/http' +# uri = URI('http://example.com') +# hostname = uri.hostname # => "example.com" +# req = Net::HTTP::Move.new(uri) # => # +# res = Net::HTTP.start(hostname) do |http| +# http.request(req) +# end +# +# See {Request Headers}[rdoc-ref:Net::HTTPRequest@Request+Headers]. +# +# Related: +# +# - Net::HTTP#move: sends +MOVE+ request, returns response object. +# +class Net::HTTP::Move < Net::HTTPRequest + METHOD = 'MOVE' + REQUEST_HAS_BODY = false + RESPONSE_HAS_BODY = true +end + +# \Class for representing +# {WebDAV method LOCK}[http://www.webdav.org/specs/rfc4918.html#METHOD_LOCK]: +# +# require 'net/http' +# uri = URI('http://example.com') +# hostname = uri.hostname # => "example.com" +# req = Net::HTTP::Lock.new(uri) # => # +# res = Net::HTTP.start(hostname) do |http| +# http.request(req) +# end +# +# See {Request Headers}[rdoc-ref:Net::HTTPRequest@Request+Headers]. +# +# Related: +# +# - Net::HTTP#lock: sends +LOCK+ request, returns response object. +# +class Net::HTTP::Lock < Net::HTTPRequest + METHOD = 'LOCK' + REQUEST_HAS_BODY = true + RESPONSE_HAS_BODY = true +end + +# \Class for representing +# {WebDAV method UNLOCK}[http://www.webdav.org/specs/rfc4918.html#METHOD_UNLOCK]: +# +# require 'net/http' +# uri = URI('http://example.com') +# hostname = uri.hostname # => "example.com" +# req = Net::HTTP::Unlock.new(uri) # => # +# res = Net::HTTP.start(hostname) do |http| +# http.request(req) +# end +# +# See {Request Headers}[rdoc-ref:Net::HTTPRequest@Request+Headers]. +# +# Related: +# +# - Net::HTTP#unlock: sends +UNLOCK+ request, returns response object. 
+# +class Net::HTTP::Unlock < Net::HTTPRequest + METHOD = 'UNLOCK' + REQUEST_HAS_BODY = true + RESPONSE_HAS_BODY = true +end + diff --git a/vendor/bundle/ruby/3.2.0/gems/net-http-0.6.0/lib/net/http/response.rb b/vendor/bundle/ruby/3.2.0/gems/net-http-0.6.0/lib/net/http/response.rb new file mode 100644 index 000000000..3aeba2e5c --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/net-http-0.6.0/lib/net/http/response.rb @@ -0,0 +1,738 @@ +# frozen_string_literal: true + +# This class is the base class for \Net::HTTP response classes. +# +# == About the Examples +# +# :include: doc/net-http/examples.rdoc +# +# == Returned Responses +# +# \Method Net::HTTP.get_response returns +# an instance of one of the subclasses of \Net::HTTPResponse: +# +# Net::HTTP.get_response(uri) +# # => # +# Net::HTTP.get_response(hostname, '/nosuch') +# # => # +# +# As does method Net::HTTP#request: +# +# req = Net::HTTP::Get.new(uri) +# Net::HTTP.start(hostname) do |http| +# http.request(req) +# end # => # +# +# \Class \Net::HTTPResponse includes module Net::HTTPHeader, +# which provides access to response header values via (among others): +# +# - \Hash-like method []. +# - Specific reader methods, such as +content_type+. +# +# Examples: +# +# res = Net::HTTP.get_response(uri) # => # +# res['Content-Type'] # => "text/html; charset=UTF-8" +# res.content_type # => "text/html" +# +# == Response Subclasses +# +# \Class \Net::HTTPResponse has a subclass for each +# {HTTP status code}[https://en.wikipedia.org/wiki/List_of_HTTP_status_codes]. +# You can look up the response class for a given code: +# +# Net::HTTPResponse::CODE_TO_OBJ['200'] # => Net::HTTPOK +# Net::HTTPResponse::CODE_TO_OBJ['400'] # => Net::HTTPBadRequest +# Net::HTTPResponse::CODE_TO_OBJ['404'] # => Net::HTTPNotFound +# +# And you can retrieve the status code for a response object: +# +# Net::HTTP.get_response(uri).code # => "200" +# Net::HTTP.get_response(hostname, '/nosuch').code # => "404" +# +# The response subclasses (indentation shows class hierarchy): +# +# - Net::HTTPUnknownResponse (for unhandled \HTTP extensions). 
+# +# - Net::HTTPInformation: +# +# - Net::HTTPContinue (100) +# - Net::HTTPSwitchProtocol (101) +# - Net::HTTPProcessing (102) +# - Net::HTTPEarlyHints (103) +# +# - Net::HTTPSuccess: +# +# - Net::HTTPOK (200) +# - Net::HTTPCreated (201) +# - Net::HTTPAccepted (202) +# - Net::HTTPNonAuthoritativeInformation (203) +# - Net::HTTPNoContent (204) +# - Net::HTTPResetContent (205) +# - Net::HTTPPartialContent (206) +# - Net::HTTPMultiStatus (207) +# - Net::HTTPAlreadyReported (208) +# - Net::HTTPIMUsed (226) +# +# - Net::HTTPRedirection: +# +# - Net::HTTPMultipleChoices (300) +# - Net::HTTPMovedPermanently (301) +# - Net::HTTPFound (302) +# - Net::HTTPSeeOther (303) +# - Net::HTTPNotModified (304) +# - Net::HTTPUseProxy (305) +# - Net::HTTPTemporaryRedirect (307) +# - Net::HTTPPermanentRedirect (308) +# +# - Net::HTTPClientError: +# +# - Net::HTTPBadRequest (400) +# - Net::HTTPUnauthorized (401) +# - Net::HTTPPaymentRequired (402) +# - Net::HTTPForbidden (403) +# - Net::HTTPNotFound (404) +# - Net::HTTPMethodNotAllowed (405) +# - Net::HTTPNotAcceptable (406) +# - Net::HTTPProxyAuthenticationRequired (407) +# - Net::HTTPRequestTimeOut (408) +# - Net::HTTPConflict (409) +# - Net::HTTPGone (410) +# - Net::HTTPLengthRequired (411) +# - Net::HTTPPreconditionFailed (412) +# - Net::HTTPRequestEntityTooLarge (413) +# - Net::HTTPRequestURITooLong (414) +# - Net::HTTPUnsupportedMediaType (415) +# - Net::HTTPRequestedRangeNotSatisfiable (416) +# - Net::HTTPExpectationFailed (417) +# - Net::HTTPMisdirectedRequest (421) +# - Net::HTTPUnprocessableEntity (422) +# - Net::HTTPLocked (423) +# - Net::HTTPFailedDependency (424) +# - Net::HTTPUpgradeRequired (426) +# - Net::HTTPPreconditionRequired (428) +# - Net::HTTPTooManyRequests (429) +# - Net::HTTPRequestHeaderFieldsTooLarge (431) +# - Net::HTTPUnavailableForLegalReasons (451) +# +# - Net::HTTPServerError: +# +# - Net::HTTPInternalServerError (500) +# - Net::HTTPNotImplemented (501) +# - Net::HTTPBadGateway (502) +# - Net::HTTPServiceUnavailable (503) +# - Net::HTTPGatewayTimeOut (504) +# - Net::HTTPVersionNotSupported (505) +# - Net::HTTPVariantAlsoNegotiates (506) +# - Net::HTTPInsufficientStorage (507) +# - Net::HTTPLoopDetected (508) +# - Net::HTTPNotExtended (510) +# - Net::HTTPNetworkAuthenticationRequired (511) +# +# There is also the Net::HTTPBadResponse exception which is raised when +# there is a protocol error. +# +class Net::HTTPResponse + class << self + # true if the response has a body. + def body_permitted? + self::HAS_BODY + end + + def exception_type # :nodoc: internal use only + self::EXCEPTION_TYPE + end + + def read_new(sock) #:nodoc: internal use only + httpv, code, msg = read_status_line(sock) + res = response_class(code).new(httpv, code, msg) + each_response_header(sock) do |k,v| + res.add_field k, v + end + res + end + + private + + def read_status_line(sock) + str = sock.readline + m = /\AHTTP(?:\/(\d+\.\d+))?\s+(\d\d\d)(?:\s+(.*))?\z/in.match(str) or + raise Net::HTTPBadResponse, "wrong status line: #{str.dump}" + m.captures + end + + def response_class(code) + CODE_TO_OBJ[code] or + CODE_CLASS_TO_OBJ[code[0,1]] or + Net::HTTPUnknownResponse + end + + def each_response_header(sock) + key = value = nil + while true + line = sock.readuntil("\n", true).sub(/\s+\z/, '') + break if line.empty? + if line[0] == ?\s or line[0] == ?\t and value + value << ' ' unless value.empty? 
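+          # folded (obs-fold) continuation line: append it to the preceding field value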
+ value << line.strip + else + yield key, value if key + key, value = line.strip.split(/\s*:\s*/, 2) + raise Net::HTTPBadResponse, 'wrong header line format' if value.nil? + end + end + yield key, value if key + end + end + + # next is to fix bug in RDoc, where the private inside class << self + # spills out. + public + + include Net::HTTPHeader + + def initialize(httpv, code, msg) #:nodoc: internal use only + @http_version = httpv + @code = code + @message = msg + initialize_http_header nil + @body = nil + @read = false + @uri = nil + @decode_content = false + @body_encoding = false + @ignore_eof = true + end + + # The HTTP version supported by the server. + attr_reader :http_version + + # The HTTP result code string. For example, '302'. You can also + # determine the response type by examining which response subclass + # the response object is an instance of. + attr_reader :code + + # The HTTP result message sent by the server. For example, 'Not Found'. + attr_reader :message + alias msg message # :nodoc: obsolete + + # The URI used to fetch this response. The response URI is only available + # if a URI was used to create the request. + attr_reader :uri + + # Set to true automatically when the request did not contain an + # Accept-Encoding header from the user. + attr_accessor :decode_content + + # Returns the value set by body_encoding=, or +false+ if none; + # see #body_encoding=. + attr_reader :body_encoding + + # Sets the encoding that should be used when reading the body: + # + # - If the given value is an Encoding object, that encoding will be used. + # - Otherwise if the value is a string, the value of + # {Encoding#find(value)}[https://docs.ruby-lang.org/en/master/Encoding.html#method-c-find] + # will be used. + # - Otherwise an encoding will be deduced from the body itself. + # + # Examples: + # + # http = Net::HTTP.new(hostname) + # req = Net::HTTP::Get.new('/') + # + # http.request(req) do |res| + # p res.body.encoding # => # + # end + # + # http.request(req) do |res| + # res.body_encoding = "UTF-8" + # p res.body.encoding # => # + # end + # + def body_encoding=(value) + value = Encoding.find(value) if value.is_a?(String) + @body_encoding = value + end + + # Whether to ignore EOF when reading bodies with a specified Content-Length + # header. + attr_accessor :ignore_eof + + def inspect + "#<#{self.class} #{@code} #{@message} readbody=#{@read}>" + end + + # + # response <-> exception relationship + # + + def code_type #:nodoc: + self.class + end + + def error! #:nodoc: + message = @code + message = "#{message} #{@message.dump}" if @message + raise error_type().new(message, self) + end + + def error_type #:nodoc: + self.class::EXCEPTION_TYPE + end + + # Raises an HTTP error if the response is not 2xx (success). + def value + error! unless self.kind_of?(Net::HTTPSuccess) + end + + def uri= uri # :nodoc: + @uri = uri.dup if uri + end + + # + # header (for backward compatibility only; DO NOT USE) + # + + def response #:nodoc: + warn "Net::HTTPResponse#response is obsolete", uplevel: 1 if $VERBOSE + self + end + + def header #:nodoc: + warn "Net::HTTPResponse#header is obsolete", uplevel: 1 if $VERBOSE + self + end + + def read_header #:nodoc: + warn "Net::HTTPResponse#read_header is obsolete", uplevel: 1 if $VERBOSE + self + end + + # + # body + # + + def reading_body(sock, reqmethodallowbody) #:nodoc: internal use only + @socket = sock + @body_exist = reqmethodallowbody && self.class.body_permitted? 
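+    # (editor's note) the ensure below drops the socket reference even if the
+    # block or the body read raises, so the response object does not retain it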
+ begin + yield + self.body # ensure to read body + ensure + @socket = nil + end + end + + # Gets the entity body returned by the remote HTTP server. + # + # If a block is given, the body is passed to the block, and + # the body is provided in fragments, as it is read in from the socket. + # + # If +dest+ argument is given, response is read into that variable, + # with dest#<< method (it could be String or IO, or any + # other object responding to <<). + # + # Calling this method a second or subsequent time for the same + # HTTPResponse object will return the value already read. + # + # http.request_get('/index.html') {|res| + # puts res.read_body + # } + # + # http.request_get('/index.html') {|res| + # p res.read_body.object_id # 538149362 + # p res.read_body.object_id # 538149362 + # } + # + # # using iterator + # http.request_get('/index.html') {|res| + # res.read_body do |segment| + # print segment + # end + # } + # + def read_body(dest = nil, &block) + if @read + raise IOError, "#{self.class}\#read_body called twice" if dest or block + return @body + end + to = procdest(dest, block) + stream_check + if @body_exist + read_body_0 to + @body = to + else + @body = nil + end + @read = true + return if @body.nil? + + case enc = @body_encoding + when Encoding, false, nil + # Encoding: force given encoding + # false/nil: do not force encoding + else + # other value: detect encoding from body + enc = detect_encoding(@body) + end + + @body.force_encoding(enc) if enc + + @body + end + + # Returns the string response body; + # note that repeated calls for the unmodified body return a cached string: + # + # path = '/todos/1' + # Net::HTTP.start(hostname) do |http| + # res = http.get(path) + # p res.body + # p http.head(path).body # No body. + # end + # + # Output: + # + # "{\n \"userId\": 1,\n \"id\": 1,\n \"title\": \"delectus aut autem\",\n \"completed\": false\n}" + # nil + # + def body + read_body() + end + + # Sets the body of the response to the given value. + def body=(value) + @body = value + end + + alias entity body #:nodoc: obsolete + + private + + # :nodoc: + def detect_encoding(str, encoding=nil) + if encoding + elsif encoding = type_params['charset'] + elsif encoding = check_bom(str) + else + encoding = case content_type&.downcase + when %r{text/x(?:ht)?ml|application/(?:[^+]+\+)?xml} + /\A' + ss.getch + return nil + end + name = ss.scan(/[^=\t\n\f\r \/>]*/) + name.downcase! + raise if name.empty? + ss.skip(/[\t\n\f\r ]*/) + if ss.getch != '=' + value = '' + return [name, value] + end + ss.skip(/[\t\n\f\r ]*/) + case ss.peek(1) + when '"' + ss.getch + value = ss.scan(/[^"]+/) + value.downcase! + ss.getch + when "'" + ss.getch + value = ss.scan(/[^']+/) + value.downcase! + ss.getch + when '>' + value = '' + else + value = ss.scan(/[^\t\n\f\r >]+/) + value.downcase! + end + [name, value] + end + + def extracting_encodings_from_meta_elements(value) + # http://dev.w3.org/html5/spec/fetching-resources.html#algorithm-for-extracting-an-encoding-from-a-meta-element + if /charset[\t\n\f\r ]*=(?:"([^"]*)"|'([^']*)'|["']|\z|([^\t\n\f\r ;]+))/i =~ value + return $1 || $2 || $3 + end + return nil + end + + ## + # Checks for a supported Content-Encoding header and yields an Inflate + # wrapper for this response's socket when zlib is present. If the + # Content-Encoding is not supported or zlib is missing, the plain socket is + # yielded. + # + # If a Content-Range header is present, a plain socket is yielded as the + # bytes in the range may not be a complete deflate block. 
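+  #
+  # Hedged sketch of the observable effect (editor's addition; assumes the server
+  # actually responds with Content-Encoding: gzip and decoding is enabled):
+  #
+  #   res = Net::HTTP.get_response(uri)
+  #   res['content-encoding'] # => nil (removed after transparent inflation)
+  #   res.body                # => the already-decompressed body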
+ + def inflater # :nodoc: + return yield @socket unless Net::HTTP::HAVE_ZLIB + return yield @socket unless @decode_content + return yield @socket if self['content-range'] + + v = self['content-encoding'] + case v&.downcase + when 'deflate', 'gzip', 'x-gzip' then + self.delete 'content-encoding' + + inflate_body_io = Inflater.new(@socket) + + begin + yield inflate_body_io + success = true + ensure + begin + inflate_body_io.finish + if self['content-length'] + self['content-length'] = inflate_body_io.bytes_inflated.to_s + end + rescue => err + # Ignore #finish's error if there is an exception from yield + raise err if success + end + end + when 'none', 'identity' then + self.delete 'content-encoding' + + yield @socket + else + yield @socket + end + end + + def read_body_0(dest) + inflater do |inflate_body_io| + if chunked? + read_chunked dest, inflate_body_io + return + end + + @socket = inflate_body_io + + clen = content_length() + if clen + @socket.read clen, dest, @ignore_eof + return + end + clen = range_length() + if clen + @socket.read clen, dest + return + end + @socket.read_all dest + end + end + + ## + # read_chunked reads from +@socket+ for chunk-size, chunk-extension, CRLF, + # etc. and +chunk_data_io+ for chunk-data which may be deflate or gzip + # encoded. + # + # See RFC 2616 section 3.6.1 for definitions + + def read_chunked(dest, chunk_data_io) # :nodoc: + total = 0 + while true + line = @socket.readline + hexlen = line.slice(/[0-9a-fA-F]+/) or + raise Net::HTTPBadResponse, "wrong chunk size line: #{line}" + len = hexlen.hex + break if len == 0 + begin + chunk_data_io.read len, dest + ensure + total += len + @socket.read 2 # \r\n + end + end + until @socket.readline.empty? + # none + end + end + + def stream_check + raise IOError, 'attempt to read body out of block' if @socket.nil? || @socket.closed? + end + + def procdest(dest, block) + raise ArgumentError, 'both arg and block given for HTTP method' if + dest and block + if block + Net::ReadAdapter.new(block) + else + dest || +'' + end + end + + ## + # Inflater is a wrapper around Net::BufferedIO that transparently inflates + # zlib and gzip streams. + + class Inflater # :nodoc: + + ## + # Creates a new Inflater wrapping +socket+ + + def initialize socket + @socket = socket + # zlib with automatic gzip detection + @inflate = Zlib::Inflate.new(32 + Zlib::MAX_WBITS) + end + + ## + # Finishes the inflate stream. + + def finish + return if @inflate.total_in == 0 + @inflate.finish + end + + ## + # The number of bytes inflated, used to update the Content-Length of + # the response. + + def bytes_inflated + @inflate.total_out + end + + ## + # Returns a Net::ReadAdapter that inflates each read chunk into +dest+. + # + # This allows a large response body to be inflated without storing the + # entire body in memory. + + def inflate_adapter(dest) + if dest.respond_to?(:set_encoding) + dest.set_encoding(Encoding::ASCII_8BIT) + elsif dest.respond_to?(:force_encoding) + dest.force_encoding(Encoding::ASCII_8BIT) + end + block = proc do |compressed_chunk| + @inflate.inflate(compressed_chunk) do |chunk| + compressed_chunk.clear + dest << chunk + end + end + + Net::ReadAdapter.new(block) + end + + ## + # Reads +clen+ bytes from the socket, inflates them, then writes them to + # +dest+. +ignore_eof+ is passed down to Net::BufferedIO#read + # + # Unlike Net::BufferedIO#read, this method returns more than +clen+ bytes. 
+ # At this time there is no way for a user of Net::HTTPResponse to read a + # specific number of bytes from the HTTP response body, so this internal + # API does not return the same number of bytes as were requested. + # + # See https://bugs.ruby-lang.org/issues/6492 for further discussion. + + def read clen, dest, ignore_eof = false + temp_dest = inflate_adapter(dest) + + @socket.read clen, temp_dest, ignore_eof + end + + ## + # Reads the rest of the socket, inflates it, then writes it to +dest+. + + def read_all dest + temp_dest = inflate_adapter(dest) + + @socket.read_all temp_dest + end + + end + +end + diff --git a/vendor/bundle/ruby/3.2.0/gems/net-http-0.6.0/lib/net/http/responses.rb b/vendor/bundle/ruby/3.2.0/gems/net-http-0.6.0/lib/net/http/responses.rb new file mode 100644 index 000000000..6f6fb8d05 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/net-http-0.6.0/lib/net/http/responses.rb @@ -0,0 +1,1174 @@ +# frozen_string_literal: true +#-- +# https://www.iana.org/assignments/http-status-codes/http-status-codes.xhtml + +module Net + + class HTTPUnknownResponse < HTTPResponse + HAS_BODY = true + EXCEPTION_TYPE = HTTPError # + end + + # Parent class for informational (1xx) HTTP response classes. + # + # An informational response indicates that the request was received and understood. + # + # References: + # + # - {RFC 9110}[https://www.rfc-editor.org/rfc/rfc9110.html#status.1xx]. + # - {Wikipedia}[https://en.wikipedia.org/wiki/List_of_HTTP_status_codes#1xx_informational_response]. + # + class HTTPInformation < HTTPResponse + HAS_BODY = false + EXCEPTION_TYPE = HTTPError # + end + + # Parent class for success (2xx) HTTP response classes. + # + # A success response indicates the action requested by the client + # was received, understood, and accepted. + # + # References: + # + # - {RFC 9110}[https://www.rfc-editor.org/rfc/rfc9110.html#status.2xx]. + # - {Wikipedia}[https://en.wikipedia.org/wiki/List_of_HTTP_status_codes#2xx_success]. + # + class HTTPSuccess < HTTPResponse + HAS_BODY = true + EXCEPTION_TYPE = HTTPError # + end + + # Parent class for redirection (3xx) HTTP response classes. + # + # A redirection response indicates the client must take additional action + # to complete the request. + # + # References: + # + # - {RFC 9110}[https://www.rfc-editor.org/rfc/rfc9110.html#status.3xx]. + # - {Wikipedia}[https://en.wikipedia.org/wiki/List_of_HTTP_status_codes#3xx_redirection]. + # + class HTTPRedirection < HTTPResponse + HAS_BODY = true + EXCEPTION_TYPE = HTTPRetriableError # + end + + # Parent class for client error (4xx) HTTP response classes. + # + # A client error response indicates that the client may have caused an error. + # + # References: + # + # - {RFC 9110}[https://www.rfc-editor.org/rfc/rfc9110.html#status.4xx]. + # - {Wikipedia}[https://en.wikipedia.org/wiki/List_of_HTTP_status_codes#4xx_client_errors]. + # + class HTTPClientError < HTTPResponse + HAS_BODY = true + EXCEPTION_TYPE = HTTPClientException # + end + + # Parent class for server error (5xx) HTTP response classes. + # + # A server error response indicates that the server failed to fulfill a request. + # + # References: + # + # - {RFC 9110}[https://www.rfc-editor.org/rfc/rfc9110.html#status.5xx]. + # - {Wikipedia}[https://en.wikipedia.org/wiki/List_of_HTTP_status_codes#5xx_server_errors]. + # + class HTTPServerError < HTTPResponse + HAS_BODY = true + EXCEPTION_TYPE = HTTPFatalError # + end + + # Response class for +Continue+ responses (status code 100). 
+ # + # A +Continue+ response indicates that the server has received the request headers. + # + # :include: doc/net-http/included_getters.rdoc + # + # References: + # + # - {Mozilla}[https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/100]. + # - {RFC 9110}[https://www.rfc-editor.org/rfc/rfc9110.html#name-100-continue]. + # - {Wikipedia}[https://en.wikipedia.org/wiki/List_of_HTTP_status_codes#100]. + # + class HTTPContinue < HTTPInformation + HAS_BODY = false + end + + # Response class for Switching Protocol responses (status code 101). + # + # The Switching Protocol response indicates that the server has received + # a request to switch protocols, and has agreed to do so. + # + # :include: doc/net-http/included_getters.rdoc + # + # References: + # + # - {Mozilla}[https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/101]. + # - {RFC 9110}[https://www.rfc-editor.org/rfc/rfc9110.html#name-101-switching-protocols]. + # - {Wikipedia}[https://en.wikipedia.org/wiki/List_of_HTTP_status_codes#101]. + # + class HTTPSwitchProtocol < HTTPInformation + HAS_BODY = false + end + + # Response class for +Processing+ responses (status code 102). + # + # The +Processing+ response indicates that the server has received + # and is processing the request, but no response is available yet. + # + # :include: doc/net-http/included_getters.rdoc + # + # References: + # + # - {RFC 2518}[https://www.rfc-editor.org/rfc/rfc2518#section-10.1]. + # - {Wikipedia}[https://en.wikipedia.org/wiki/List_of_HTTP_status_codes#102]. + # + class HTTPProcessing < HTTPInformation + HAS_BODY = false + end + + # Response class for Early Hints responses (status code 103). + # + # The Early Hints indicates that the server has received + # and is processing the request, and contains certain headers; + # the final response is not available yet. + # + # :include: doc/net-http/included_getters.rdoc + # + # References: + # + # - {Mozilla}[https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/103]. + # - {RFC 8297}[https://www.rfc-editor.org/rfc/rfc8297.html#section-2]. + # - {Wikipedia}[https://en.wikipedia.org/wiki/List_of_HTTP_status_codes#103]. + # + class HTTPEarlyHints < HTTPInformation + HAS_BODY = false + end + + # Response class for +OK+ responses (status code 200). + # + # The +OK+ response indicates that the server has received + # a request and has responded successfully. + # + # :include: doc/net-http/included_getters.rdoc + # + # References: + # + # - {Mozilla}[https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/200]. + # - {RFC 9110}[https://www.rfc-editor.org/rfc/rfc9110.html#name-200-ok]. + # - {Wikipedia}[https://en.wikipedia.org/wiki/List_of_HTTP_status_codes#200]. + # + class HTTPOK < HTTPSuccess + HAS_BODY = true + end + + # Response class for +Created+ responses (status code 201). + # + # The +Created+ response indicates that the server has received + # and has fulfilled a request to create a new resource. + # + # :include: doc/net-http/included_getters.rdoc + # + # References: + # + # - {Mozilla}[https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/201]. + # - {RFC 9110}[https://www.rfc-editor.org/rfc/rfc9110.html#name-201-created]. + # - {Wikipedia}[https://en.wikipedia.org/wiki/List_of_HTTP_status_codes#201]. + # + class HTTPCreated < HTTPSuccess + HAS_BODY = true + end + + # Response class for +Accepted+ responses (status code 202). + # + # The +Accepted+ response indicates that the server has received + # and is processing a request, but the processing has not yet been completed. 
+  #
+  # :include: doc/net-http/included_getters.rdoc
+  #
+  # References:
+  #
+  # - {Mozilla}[https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/202].
+  # - {RFC 9110}[https://www.rfc-editor.org/rfc/rfc9110.html#name-202-accepted].
+  # - {Wikipedia}[https://en.wikipedia.org/wiki/List_of_HTTP_status_codes#202].
+  #
+  class HTTPAccepted < HTTPSuccess
+    HAS_BODY = true
+  end
+
+  # Response class for Non-Authoritative Information responses (status code 203).
+  #
+  # The Non-Authoritative Information response indicates that the server
+  # is a transforming proxy (such as a Web accelerator)
+  # that received a 200 OK response from its origin,
+  # and is returning a modified version of the origin's response.
+  #
+  # :include: doc/net-http/included_getters.rdoc
+  #
+  # References:
+  #
+  # - {Mozilla}[https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/203].
+  # - {RFC 9110}[https://www.rfc-editor.org/rfc/rfc9110.html#name-203-non-authoritative-infor].
+  # - {Wikipedia}[https://en.wikipedia.org/wiki/List_of_HTTP_status_codes#203].
+  #
+  class HTTPNonAuthoritativeInformation < HTTPSuccess
+    HAS_BODY = true
+  end
+
+  # Response class for No Content responses (status code 204).
+  #
+  # The No Content response indicates that the server
+  # successfully processed the request, and is not returning any content.
+  #
+  # :include: doc/net-http/included_getters.rdoc
+  #
+  # References:
+  #
+  # - {Mozilla}[https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/204].
+  # - {RFC 9110}[https://www.rfc-editor.org/rfc/rfc9110.html#name-204-no-content].
+  # - {Wikipedia}[https://en.wikipedia.org/wiki/List_of_HTTP_status_codes#204].
+  #
+  class HTTPNoContent < HTTPSuccess
+    HAS_BODY = false
+  end
+
+  # Response class for Reset Content responses (status code 205).
+  #
+  # The Reset Content response indicates that the server
+  # successfully processed the request,
+  # asks that the client reset its document view, and is not returning any content.
+  #
+  # :include: doc/net-http/included_getters.rdoc
+  #
+  # References:
+  #
+  # - {Mozilla}[https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/205].
+  # - {RFC 9110}[https://www.rfc-editor.org/rfc/rfc9110.html#name-205-reset-content].
+  # - {Wikipedia}[https://en.wikipedia.org/wiki/List_of_HTTP_status_codes#205].
+  #
+  class HTTPResetContent < HTTPSuccess
+    HAS_BODY = false
+  end
+
+  # Response class for Partial Content responses (status code 206).
+  #
+  # The Partial Content response indicates that the server is delivering
+  # only part of the resource (byte serving)
+  # due to a Range header in the request.
+  #
+  # :include: doc/net-http/included_getters.rdoc
+  #
+  # References:
+  #
+  # - {Mozilla}[https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/206].
+  # - {RFC 9110}[https://www.rfc-editor.org/rfc/rfc9110.html#name-206-partial-content].
+  # - {Wikipedia}[https://en.wikipedia.org/wiki/List_of_HTTP_status_codes#206].
+  #
+  class HTTPPartialContent < HTTPSuccess
+    HAS_BODY = true
+  end
+
+  # Response class for Multi-Status (WebDAV) responses (status code 207).
+  #
+  # The Multi-Status (WebDAV) response indicates that the server
+  # has received the request,
+  # and that the message body can contain a number of separate response codes.
+  #
+  # :include: doc/net-http/included_getters.rdoc
+  #
+  # References:
+  #
+  # - {RFC 4918}[https://www.rfc-editor.org/rfc/rfc4918#section-11.1].
+  # - {Wikipedia}[https://en.wikipedia.org/wiki/List_of_HTTP_status_codes#207].
+ # + class HTTPMultiStatus < HTTPSuccess + HAS_BODY = true + end + + # Response class for Already Reported (WebDAV) responses (status code 208). + # + # The Already Reported (WebDAV) response indicates that the server + # has received the request, + # and that the members of a DAV binding have already been enumerated + # in a preceding part of the (multi-status) response, + # and are not being included again. + # + # :include: doc/net-http/included_getters.rdoc + # + # References: + # + # - {RFC 5842}[https://www.rfc-editor.org/rfc/rfc5842.html#section-7.1]. + # - {Wikipedia}[https://en.wikipedia.org/wiki/List_of_HTTP_status_codes#208]. + # + class HTTPAlreadyReported < HTTPSuccess + HAS_BODY = true + end + + # Response class for IM Used responses (status code 226). + # + # The IM Used response indicates that the server has fulfilled a request + # for the resource, and the response is a representation of the result + # of one or more instance-manipulations applied to the current instance. + # + # :include: doc/net-http/included_getters.rdoc + # + # References: + # + # - {RFC 3229}[https://www.rfc-editor.org/rfc/rfc3229.html#section-10.4.1]. + # - {Wikipedia}[https://en.wikipedia.org/wiki/List_of_HTTP_status_codes#226]. + # + class HTTPIMUsed < HTTPSuccess + HAS_BODY = true + end + + # Response class for Multiple Choices responses (status code 300). + # + # The Multiple Choices response indicates that the server + # offers multiple options for the resource from which the client may choose. + # + # :include: doc/net-http/included_getters.rdoc + # + # References: + # + # - {Mozilla}[https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/300]. + # - {RFC 9110}[https://www.rfc-editor.org/rfc/rfc9110.html#name-300-multiple-choices]. + # - {Wikipedia}[https://en.wikipedia.org/wiki/List_of_HTTP_status_codes#300]. + # + class HTTPMultipleChoices < HTTPRedirection + HAS_BODY = true + end + HTTPMultipleChoice = HTTPMultipleChoices + + # Response class for Moved Permanently responses (status code 301). + # + # The Moved Permanently response indicates that links or records + # returning this response should be updated to use the given URL. + # + # :include: doc/net-http/included_getters.rdoc + # + # References: + # + # - {Mozilla}[https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/301]. + # - {RFC 9110}[https://www.rfc-editor.org/rfc/rfc9110.html#name-301-moved-permanently]. + # - {Wikipedia}[https://en.wikipedia.org/wiki/List_of_HTTP_status_codes#301]. + # + class HTTPMovedPermanently < HTTPRedirection + HAS_BODY = true + end + + # Response class for Found responses (status code 302). + # + # The Found response indicates that the client + # should look at (browse to) another URL. + # + # :include: doc/net-http/included_getters.rdoc + # + # References: + # + # - {Mozilla}[https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/302]. + # - {RFC 9110}[https://www.rfc-editor.org/rfc/rfc9110.html#name-302-found]. + # - {Wikipedia}[https://en.wikipedia.org/wiki/List_of_HTTP_status_codes#302]. + # + class HTTPFound < HTTPRedirection + HAS_BODY = true + end + HTTPMovedTemporarily = HTTPFound + + # Response class for See Other responses (status code 303). + # + # The response to the request can be found under another URI using the GET method. + # + # :include: doc/net-http/included_getters.rdoc + # + # References: + # + # - {Mozilla}[https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/303]. + # - {RFC 9110}[https://www.rfc-editor.org/rfc/rfc9110.html#name-303-see-other]. 
+ # - {Wikipedia}[https://en.wikipedia.org/wiki/List_of_HTTP_status_codes#303]. + # + class HTTPSeeOther < HTTPRedirection + HAS_BODY = true + end + + # Response class for Not Modified responses (status code 304). + # + # Indicates that the resource has not been modified since the version + # specified by the request headers. + # + # :include: doc/net-http/included_getters.rdoc + # + # References: + # + # - {Mozilla}[https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/304]. + # - {RFC 9110}[https://www.rfc-editor.org/rfc/rfc9110.html#name-304-not-modified]. + # - {Wikipedia}[https://en.wikipedia.org/wiki/List_of_HTTP_status_codes#304]. + # + class HTTPNotModified < HTTPRedirection + HAS_BODY = false + end + + # Response class for Use Proxy responses (status code 305). + # + # The requested resource is available only through a proxy, + # whose address is provided in the response. + # + # :include: doc/net-http/included_getters.rdoc + # + # References: + # + # - {RFC 9110}[https://www.rfc-editor.org/rfc/rfc9110.html#name-305-use-proxy]. + # - {Wikipedia}[https://en.wikipedia.org/wiki/List_of_HTTP_status_codes#305]. + # + class HTTPUseProxy < HTTPRedirection + HAS_BODY = false + end + + # Response class for Temporary Redirect responses (status code 307). + # + # The request should be repeated with another URI; + # however, future requests should still use the original URI. + # + # :include: doc/net-http/included_getters.rdoc + # + # References: + # + # - {Mozilla}[https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/307]. + # - {RFC 9110}[https://www.rfc-editor.org/rfc/rfc9110.html#name-307-temporary-redirect]. + # - {Wikipedia}[https://en.wikipedia.org/wiki/List_of_HTTP_status_codes#307]. + # + class HTTPTemporaryRedirect < HTTPRedirection + HAS_BODY = true + end + + # Response class for Permanent Redirect responses (status code 308). + # + # This and all future requests should be directed to the given URI. + # + # :include: doc/net-http/included_getters.rdoc + # + # References: + # + # - {Mozilla}[https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/308]. + # - {RFC 9110}[https://www.rfc-editor.org/rfc/rfc9110.html#name-308-permanent-redirect]. + # - {Wikipedia}[https://en.wikipedia.org/wiki/List_of_HTTP_status_codes#308]. + # + class HTTPPermanentRedirect < HTTPRedirection + HAS_BODY = true + end + + # Response class for Bad Request responses (status code 400). + # + # The server cannot or will not process the request due to an apparent client error. + # + # :include: doc/net-http/included_getters.rdoc + # + # References: + # + # - {Mozilla}[https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/400]. + # - {RFC 9110}[https://www.rfc-editor.org/rfc/rfc9110.html#name-400-bad-request]. + # - {Wikipedia}[https://en.wikipedia.org/wiki/List_of_HTTP_status_codes#400]. + # + class HTTPBadRequest < HTTPClientError + HAS_BODY = true + end + + # Response class for Unauthorized responses (status code 401). + # + # Authentication is required, but either was not provided or failed. + # + # :include: doc/net-http/included_getters.rdoc + # + # References: + # + # - {Mozilla}[https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/401]. + # - {RFC 9110}[https://www.rfc-editor.org/rfc/rfc9110.html#name-401-unauthorized]. + # - {Wikipedia}[https://en.wikipedia.org/wiki/List_of_HTTP_status_codes#401]. + # + class HTTPUnauthorized < HTTPClientError + HAS_BODY = true + end + + # Response class for Payment Required responses (status code 402). + # + # Reserved for future use. 
+  #
+  # :include: doc/net-http/included_getters.rdoc
+  #
+  # References:
+  #
+  # - {Mozilla}[https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/402].
+  # - {RFC 9110}[https://www.rfc-editor.org/rfc/rfc9110.html#name-402-payment-required].
+  # - {Wikipedia}[https://en.wikipedia.org/wiki/List_of_HTTP_status_codes#402].
+  #
+  class HTTPPaymentRequired < HTTPClientError
+    HAS_BODY = true
+  end
+
+  # Response class for Forbidden responses (status code 403).
+  #
+  # The request contained valid data and was understood by the server,
+  # but the server is refusing action.
+  #
+  # :include: doc/net-http/included_getters.rdoc
+  #
+  # References:
+  #
+  # - {Mozilla}[https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/403].
+  # - {RFC 9110}[https://www.rfc-editor.org/rfc/rfc9110.html#name-403-forbidden].
+  # - {Wikipedia}[https://en.wikipedia.org/wiki/List_of_HTTP_status_codes#403].
+  #
+  class HTTPForbidden < HTTPClientError
+    HAS_BODY = true
+  end
+
+  # Response class for Not Found responses (status code 404).
+  #
+  # The requested resource could not be found but may be available in the future.
+  #
+  # :include: doc/net-http/included_getters.rdoc
+  #
+  # References:
+  #
+  # - {Mozilla}[https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/404].
+  # - {RFC 9110}[https://www.rfc-editor.org/rfc/rfc9110.html#name-404-not-found].
+  # - {Wikipedia}[https://en.wikipedia.org/wiki/List_of_HTTP_status_codes#404].
+  #
+  class HTTPNotFound < HTTPClientError
+    HAS_BODY = true
+  end
+
+  # Response class for Method Not Allowed responses (status code 405).
+  #
+  # The request method is not supported for the requested resource.
+  #
+  # :include: doc/net-http/included_getters.rdoc
+  #
+  # References:
+  #
+  # - {Mozilla}[https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/405].
+  # - {RFC 9110}[https://www.rfc-editor.org/rfc/rfc9110.html#name-405-method-not-allowed].
+  # - {Wikipedia}[https://en.wikipedia.org/wiki/List_of_HTTP_status_codes#405].
+  #
+  class HTTPMethodNotAllowed < HTTPClientError
+    HAS_BODY = true
+  end
+
+  # Response class for Not Acceptable responses (status code 406).
+  #
+  # The requested resource is capable of generating only content
+  # that is not acceptable according to the Accept headers sent in the request.
+  #
+  # :include: doc/net-http/included_getters.rdoc
+  #
+  # References:
+  #
+  # - {Mozilla}[https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/406].
+  # - {RFC 9110}[https://www.rfc-editor.org/rfc/rfc9110.html#name-406-not-acceptable].
+  # - {Wikipedia}[https://en.wikipedia.org/wiki/List_of_HTTP_status_codes#406].
+  #
+  class HTTPNotAcceptable < HTTPClientError
+    HAS_BODY = true
+  end
+
+  # Response class for Proxy Authentication Required responses (status code 407).
+  #
+  # The client must first authenticate itself with the proxy.
+  #
+  # :include: doc/net-http/included_getters.rdoc
+  #
+  # References:
+  #
+  # - {Mozilla}[https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/407].
+  # - {RFC 9110}[https://www.rfc-editor.org/rfc/rfc9110.html#name-407-proxy-authentication-re].
+  # - {Wikipedia}[https://en.wikipedia.org/wiki/List_of_HTTP_status_codes#407].
+  #
+  class HTTPProxyAuthenticationRequired < HTTPClientError
+    HAS_BODY = true
+  end
+
+  # Response class for Request Timeout responses (status code 408).
+  #
+  # The server timed out waiting for the request.
+  #
+  # :include: doc/net-http/included_getters.rdoc
+  #
+  # References:
+  #
+  # - {Mozilla}[https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/408].
+ # - {RFC 9110}[https://www.rfc-editor.org/rfc/rfc9110.html#name-408-request-timeout]. + # - {Wikipedia}[https://en.wikipedia.org/wiki/List_of_HTTP_status_codes#408]. + # + class HTTPRequestTimeout < HTTPClientError + HAS_BODY = true + end + HTTPRequestTimeOut = HTTPRequestTimeout + + # Response class for Conflict responses (status code 409). + # + # The request could not be processed because of conflict in the current state of the resource. + # + # :include: doc/net-http/included_getters.rdoc + # + # References: + # + # - {Mozilla}[https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/409]. + # - {RFC 9110}[https://www.rfc-editor.org/rfc/rfc9110.html#name-409-conflict]. + # - {Wikipedia}[https://en.wikipedia.org/wiki/List_of_HTTP_status_codes#409]. + # + class HTTPConflict < HTTPClientError + HAS_BODY = true + end + + # Response class for Gone responses (status code 410). + # + # The resource requested was previously in use but is no longer available + # and will not be available again. + # + # :include: doc/net-http/included_getters.rdoc + # + # References: + # + # - {Mozilla}[https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/410]. + # - {RFC 9110}[https://www.rfc-editor.org/rfc/rfc9110.html#name-410-gone]. + # - {Wikipedia}[https://en.wikipedia.org/wiki/List_of_HTTP_status_codes#410]. + # + class HTTPGone < HTTPClientError + HAS_BODY = true + end + + # Response class for Length Required responses (status code 411). + # + # The request did not specify the length of its content, + # which is required by the requested resource. + # + # :include: doc/net-http/included_getters.rdoc + # + # References: + # + # - {Mozilla}[https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/411]. + # - {RFC 9110}[https://www.rfc-editor.org/rfc/rfc9110.html#name-411-length-required]. + # - {Wikipedia}[https://en.wikipedia.org/wiki/List_of_HTTP_status_codes#411]. + # + class HTTPLengthRequired < HTTPClientError + HAS_BODY = true + end + + # Response class for Precondition Failed responses (status code 412). + # + # The server does not meet one of the preconditions + # specified in the request headers. + # + # :include: doc/net-http/included_getters.rdoc + # + # References: + # + # - {Mozilla}[https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/412]. + # - {RFC 9110}[https://www.rfc-editor.org/rfc/rfc9110.html#name-412-precondition-failed]. + # - {Wikipedia}[https://en.wikipedia.org/wiki/List_of_HTTP_status_codes#412]. + # + class HTTPPreconditionFailed < HTTPClientError + HAS_BODY = true + end + + # Response class for Payload Too Large responses (status code 413). + # + # The request is larger than the server is willing or able to process. + # + # :include: doc/net-http/included_getters.rdoc + # + # References: + # + # - {Mozilla}[https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/413]. + # - {RFC 9110}[https://www.rfc-editor.org/rfc/rfc9110.html#name-413-content-too-large]. + # - {Wikipedia}[https://en.wikipedia.org/wiki/List_of_HTTP_status_codes#413]. + # + class HTTPPayloadTooLarge < HTTPClientError + HAS_BODY = true + end + HTTPRequestEntityTooLarge = HTTPPayloadTooLarge + + # Response class for URI Too Long responses (status code 414). + # + # The URI provided was too long for the server to process. + # + # :include: doc/net-http/included_getters.rdoc + # + # References: + # + # - {Mozilla}[https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/414]. + # - {RFC 9110}[https://www.rfc-editor.org/rfc/rfc9110.html#name-414-uri-too-long]. 
+  # - {Wikipedia}[https://en.wikipedia.org/wiki/List_of_HTTP_status_codes#414].
+  #
+  class HTTPURITooLong < HTTPClientError
+    HAS_BODY = true
+  end
+  HTTPRequestURITooLong = HTTPURITooLong
+  HTTPRequestURITooLarge = HTTPRequestURITooLong
+
+  # Response class for Unsupported Media Type responses (status code 415).
+  #
+  # The request entity has a media type which the server or resource does not support.
+  #
+  # :include: doc/net-http/included_getters.rdoc
+  #
+  # References:
+  #
+  # - {Mozilla}[https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/415].
+  # - {RFC 9110}[https://www.rfc-editor.org/rfc/rfc9110.html#name-415-unsupported-media-type].
+  # - {Wikipedia}[https://en.wikipedia.org/wiki/List_of_HTTP_status_codes#415].
+  #
+  class HTTPUnsupportedMediaType < HTTPClientError
+    HAS_BODY = true
+  end
+
+  # Response class for Range Not Satisfiable responses (status code 416).
+  #
+  # The client has asked for a portion of the resource (byte serving),
+  # but the server cannot supply that portion.
+  #
+  # :include: doc/net-http/included_getters.rdoc
+  #
+  # References:
+  #
+  # - {Mozilla}[https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/416].
+  # - {RFC 9110}[https://www.rfc-editor.org/rfc/rfc9110.html#name-416-range-not-satisfiable].
+  # - {Wikipedia}[https://en.wikipedia.org/wiki/List_of_HTTP_status_codes#416].
+  #
+  class HTTPRangeNotSatisfiable < HTTPClientError
+    HAS_BODY = true
+  end
+  HTTPRequestedRangeNotSatisfiable = HTTPRangeNotSatisfiable
+
+  # Response class for Expectation Failed responses (status code 417).
+  #
+  # The server cannot meet the requirements of the Expect request-header field.
+  #
+  # :include: doc/net-http/included_getters.rdoc
+  #
+  # References:
+  #
+  # - {Mozilla}[https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/417].
+  # - {RFC 9110}[https://www.rfc-editor.org/rfc/rfc9110.html#name-417-expectation-failed].
+  # - {Wikipedia}[https://en.wikipedia.org/wiki/List_of_HTTP_status_codes#417].
+  #
+  class HTTPExpectationFailed < HTTPClientError
+    HAS_BODY = true
+  end
+
+  # 418 I'm a teapot - RFC 2324; a joke RFC
+  # See https://en.wikipedia.org/wiki/List_of_HTTP_status_codes#418.
+
+  # 420 Enhance Your Calm - Twitter
+
+  # Response class for Misdirected Request responses (status code 421).
+  #
+  # The request was directed at a server that is not able to produce a response.
+  #
+  # :include: doc/net-http/included_getters.rdoc
+  #
+  # References:
+  #
+  # - {RFC 9110}[https://www.rfc-editor.org/rfc/rfc9110.html#name-421-misdirected-request].
+  # - {Wikipedia}[https://en.wikipedia.org/wiki/List_of_HTTP_status_codes#421].
+  #
+  class HTTPMisdirectedRequest < HTTPClientError
+    HAS_BODY = true
+  end
+
+  # Response class for Unprocessable Entity responses (status code 422).
+  #
+  # The request was well-formed but had semantic errors.
+  #
+  # :include: doc/net-http/included_getters.rdoc
+  #
+  # References:
+  #
+  # - {Mozilla}[https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/422].
+  # - {RFC 9110}[https://www.rfc-editor.org/rfc/rfc9110.html#name-422-unprocessable-content].
+  # - {Wikipedia}[https://en.wikipedia.org/wiki/List_of_HTTP_status_codes#422].
+  #
+  class HTTPUnprocessableEntity < HTTPClientError
+    HAS_BODY = true
+  end
+
+  # Response class for Locked (WebDAV) responses (status code 423).
+  #
+  # The requested resource is locked.
+  #
+  # :include: doc/net-http/included_getters.rdoc
+  #
+  # References:
+  #
+  # - {RFC 4918}[https://www.rfc-editor.org/rfc/rfc4918#section-11.3].
+  # - {Wikipedia}[https://en.wikipedia.org/wiki/List_of_HTTP_status_codes#423].
+ # + class HTTPLocked < HTTPClientError + HAS_BODY = true + end + + # Response class for Failed Dependency (WebDAV) responses (status code 424). + # + # The request failed because it depended on another request and that request failed. + # See {424 Failed Dependency (WebDAV)}[https://en.wikipedia.org/wiki/List_of_HTTP_status_codes#424]. + # + # :include: doc/net-http/included_getters.rdoc + # + # References: + # + # - {RFC 4918}[https://www.rfc-editor.org/rfc/rfc4918#section-11.4]. + # - {Wikipedia}[https://en.wikipedia.org/wiki/List_of_HTTP_status_codes#424]. + # + class HTTPFailedDependency < HTTPClientError + HAS_BODY = true + end + + # 425 Too Early + # https://en.wikipedia.org/wiki/List_of_HTTP_status_codes#425. + + # Response class for Upgrade Required responses (status code 426). + # + # The client should switch to the protocol given in the Upgrade header field. + # + # :include: doc/net-http/included_getters.rdoc + # + # References: + # + # - {Mozilla}[https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/426]. + # - {RFC 9110}[https://www.rfc-editor.org/rfc/rfc9110.html#name-426-upgrade-required]. + # - {Wikipedia}[https://en.wikipedia.org/wiki/List_of_HTTP_status_codes#426]. + # + class HTTPUpgradeRequired < HTTPClientError + HAS_BODY = true + end + + # Response class for Precondition Required responses (status code 428). + # + # The origin server requires the request to be conditional. + # + # :include: doc/net-http/included_getters.rdoc + # + # References: + # + # - {Mozilla}[https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/428]. + # - {RFC 6585}[https://www.rfc-editor.org/rfc/rfc6585#section-3]. + # - {Wikipedia}[https://en.wikipedia.org/wiki/List_of_HTTP_status_codes#428]. + # + class HTTPPreconditionRequired < HTTPClientError + HAS_BODY = true + end + + # Response class for Too Many Requests responses (status code 429). + # + # The user has sent too many requests in a given amount of time. + # + # :include: doc/net-http/included_getters.rdoc + # + # References: + # + # - {Mozilla}[https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/429]. + # - {RFC 6585}[https://www.rfc-editor.org/rfc/rfc6585#section-4]. + # - {Wikipedia}[https://en.wikipedia.org/wiki/List_of_HTTP_status_codes#429]. + # + class HTTPTooManyRequests < HTTPClientError + HAS_BODY = true + end + + # Response class for Request Header Fields Too Large responses (status code 431). + # + # An individual header field is too large, + # or all the header fields collectively, are too large. + # + # :include: doc/net-http/included_getters.rdoc + # + # References: + # + # - {Mozilla}[https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/431]. + # - {RFC 6585}[https://www.rfc-editor.org/rfc/rfc6585#section-5]. + # - {Wikipedia}[https://en.wikipedia.org/wiki/List_of_HTTP_status_codes#431]. + # + class HTTPRequestHeaderFieldsTooLarge < HTTPClientError + HAS_BODY = true + end + + # Response class for Unavailable For Legal Reasons responses (status code 451). + # + # A server operator has received a legal demand to deny access to a resource or to a set of resources + # that includes the requested resource. + # + # :include: doc/net-http/included_getters.rdoc + # + # References: + # + # - {Mozilla}[https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/451]. + # - {RFC 7725}[https://www.rfc-editor.org/rfc/rfc7725.html#section-3]. + # - {Wikipedia}[https://en.wikipedia.org/wiki/List_of_HTTP_status_codes#451]. 
+ # + class HTTPUnavailableForLegalReasons < HTTPClientError + HAS_BODY = true + end + # 444 No Response - Nginx + # 449 Retry With - Microsoft + # 450 Blocked by Windows Parental Controls - Microsoft + # 499 Client Closed Request - Nginx + + # Response class for Internal Server Error responses (status code 500). + # + # An unexpected condition was encountered and no more specific message is suitable. + # + # :include: doc/net-http/included_getters.rdoc + # + # References: + # + # - {Mozilla}[https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/500]. + # - {RFC 9110}[https://www.rfc-editor.org/rfc/rfc9110.html#name-500-internal-server-error]. + # - {Wikipedia}[https://en.wikipedia.org/wiki/List_of_HTTP_status_codes#500]. + # + class HTTPInternalServerError < HTTPServerError + HAS_BODY = true + end + + # Response class for Not Implemented responses (status code 501). + # + # The server either does not recognize the request method, + # or it lacks the ability to fulfil the request. + # + # :include: doc/net-http/included_getters.rdoc + # + # References: + # + # - {Mozilla}[https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/501]. + # - {RFC 9110}[https://www.rfc-editor.org/rfc/rfc9110.html#name-501-not-implemented]. + # - {Wikipedia}[https://en.wikipedia.org/wiki/List_of_HTTP_status_codes#501]. + # + class HTTPNotImplemented < HTTPServerError + HAS_BODY = true + end + + # Response class for Bad Gateway responses (status code 502). + # + # The server was acting as a gateway or proxy + # and received an invalid response from the upstream server. + # + # :include: doc/net-http/included_getters.rdoc + # + # References: + # + # - {Mozilla}[https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/502]. + # - {RFC 9110}[https://www.rfc-editor.org/rfc/rfc9110.html#name-502-bad-gateway]. + # - {Wikipedia}[https://en.wikipedia.org/wiki/List_of_HTTP_status_codes#502]. + # + class HTTPBadGateway < HTTPServerError + HAS_BODY = true + end + + # Response class for Service Unavailable responses (status code 503). + # + # The server cannot handle the request + # (because it is overloaded or down for maintenance). + # + # :include: doc/net-http/included_getters.rdoc + # + # References: + # + # - {Mozilla}[https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/503]. + # - {RFC 9110}[https://www.rfc-editor.org/rfc/rfc9110.html#name-503-service-unavailable]. + # - {Wikipedia}[https://en.wikipedia.org/wiki/List_of_HTTP_status_codes#503]. + # + class HTTPServiceUnavailable < HTTPServerError + HAS_BODY = true + end + + # Response class for Gateway Timeout responses (status code 504). + # + # The server was acting as a gateway or proxy + # and did not receive a timely response from the upstream server. + # + # :include: doc/net-http/included_getters.rdoc + # + # References: + # + # - {Mozilla}[https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/504]. + # - {RFC 9110}[https://www.rfc-editor.org/rfc/rfc9110.html#name-504-gateway-timeout]. + # - {Wikipedia}[https://en.wikipedia.org/wiki/List_of_HTTP_status_codes#504]. + # + class HTTPGatewayTimeout < HTTPServerError + HAS_BODY = true + end + HTTPGatewayTimeOut = HTTPGatewayTimeout + + # Response class for HTTP Version Not Supported responses (status code 505). + # + # The server does not support the HTTP version used in the request. + # + # :include: doc/net-http/included_getters.rdoc + # + # References: + # + # - {Mozilla}[https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/505]. 
+  # - {RFC 9110}[https://www.rfc-editor.org/rfc/rfc9110.html#name-505-http-version-not-suppor].
+  # - {Wikipedia}[https://en.wikipedia.org/wiki/List_of_HTTP_status_codes#505].
+  #
+  class HTTPVersionNotSupported < HTTPServerError
+    HAS_BODY = true
+  end
+
+  # Response class for Variant Also Negotiates responses (status code 506).
+  #
+  # Transparent content negotiation for the request results in a circular reference.
+  #
+  # :include: doc/net-http/included_getters.rdoc
+  #
+  # References:
+  #
+  # - {Mozilla}[https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/506].
+  # - {RFC 2295}[https://www.rfc-editor.org/rfc/rfc2295#section-8.1].
+  # - {Wikipedia}[https://en.wikipedia.org/wiki/List_of_HTTP_status_codes#506].
+  #
+  class HTTPVariantAlsoNegotiates < HTTPServerError
+    HAS_BODY = true
+  end
+
+  # Response class for Insufficient Storage (WebDAV) responses (status code 507).
+  #
+  # The server is unable to store the representation needed to complete the request.
+  #
+  # :include: doc/net-http/included_getters.rdoc
+  #
+  # References:
+  #
+  # - {Mozilla}[https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/507].
+  # - {RFC 4918}[https://www.rfc-editor.org/rfc/rfc4918#section-11.5].
+  # - {Wikipedia}[https://en.wikipedia.org/wiki/List_of_HTTP_status_codes#507].
+  #
+  class HTTPInsufficientStorage < HTTPServerError
+    HAS_BODY = true
+  end
+
+  # Response class for Loop Detected (WebDAV) responses (status code 508).
+  #
+  # The server detected an infinite loop while processing the request.
+  #
+  # :include: doc/net-http/included_getters.rdoc
+  #
+  # References:
+  #
+  # - {Mozilla}[https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/508].
+  # - {RFC 5842}[https://www.rfc-editor.org/rfc/rfc5842.html#section-7.2].
+  # - {Wikipedia}[https://en.wikipedia.org/wiki/List_of_HTTP_status_codes#508].
+  #
+  class HTTPLoopDetected < HTTPServerError
+    HAS_BODY = true
+  end
+  # 509 Bandwidth Limit Exceeded - Apache bw/limited extension
+
+  # Response class for Not Extended responses (status code 510).
+  #
+  # Further extensions to the request are required for the server to fulfill it.
+  #
+  # :include: doc/net-http/included_getters.rdoc
+  #
+  # References:
+  #
+  # - {Mozilla}[https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/510].
+  # - {RFC 2774}[https://www.rfc-editor.org/rfc/rfc2774.html#section-7].
+  # - {Wikipedia}[https://en.wikipedia.org/wiki/List_of_HTTP_status_codes#510].
+  #
+  class HTTPNotExtended < HTTPServerError
+    HAS_BODY = true
+  end
+
+  # Response class for Network Authentication Required responses (status code 511).
+  #
+  # The client needs to authenticate to gain network access.
+  #
+  # :include: doc/net-http/included_getters.rdoc
+  #
+  # References:
+  #
+  # - {Mozilla}[https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/511].
+  # - {RFC 6585}[https://www.rfc-editor.org/rfc/rfc6585#section-6].
+  # - {Wikipedia}[https://en.wikipedia.org/wiki/List_of_HTTP_status_codes#511].
+ # + class HTTPNetworkAuthenticationRequired < HTTPServerError + HAS_BODY = true + end + +end + +class Net::HTTPResponse + CODE_CLASS_TO_OBJ = { + '1' => Net::HTTPInformation, + '2' => Net::HTTPSuccess, + '3' => Net::HTTPRedirection, + '4' => Net::HTTPClientError, + '5' => Net::HTTPServerError + } + CODE_TO_OBJ = { + '100' => Net::HTTPContinue, + '101' => Net::HTTPSwitchProtocol, + '102' => Net::HTTPProcessing, + '103' => Net::HTTPEarlyHints, + + '200' => Net::HTTPOK, + '201' => Net::HTTPCreated, + '202' => Net::HTTPAccepted, + '203' => Net::HTTPNonAuthoritativeInformation, + '204' => Net::HTTPNoContent, + '205' => Net::HTTPResetContent, + '206' => Net::HTTPPartialContent, + '207' => Net::HTTPMultiStatus, + '208' => Net::HTTPAlreadyReported, + '226' => Net::HTTPIMUsed, + + '300' => Net::HTTPMultipleChoices, + '301' => Net::HTTPMovedPermanently, + '302' => Net::HTTPFound, + '303' => Net::HTTPSeeOther, + '304' => Net::HTTPNotModified, + '305' => Net::HTTPUseProxy, + '307' => Net::HTTPTemporaryRedirect, + '308' => Net::HTTPPermanentRedirect, + + '400' => Net::HTTPBadRequest, + '401' => Net::HTTPUnauthorized, + '402' => Net::HTTPPaymentRequired, + '403' => Net::HTTPForbidden, + '404' => Net::HTTPNotFound, + '405' => Net::HTTPMethodNotAllowed, + '406' => Net::HTTPNotAcceptable, + '407' => Net::HTTPProxyAuthenticationRequired, + '408' => Net::HTTPRequestTimeout, + '409' => Net::HTTPConflict, + '410' => Net::HTTPGone, + '411' => Net::HTTPLengthRequired, + '412' => Net::HTTPPreconditionFailed, + '413' => Net::HTTPPayloadTooLarge, + '414' => Net::HTTPURITooLong, + '415' => Net::HTTPUnsupportedMediaType, + '416' => Net::HTTPRangeNotSatisfiable, + '417' => Net::HTTPExpectationFailed, + '421' => Net::HTTPMisdirectedRequest, + '422' => Net::HTTPUnprocessableEntity, + '423' => Net::HTTPLocked, + '424' => Net::HTTPFailedDependency, + '426' => Net::HTTPUpgradeRequired, + '428' => Net::HTTPPreconditionRequired, + '429' => Net::HTTPTooManyRequests, + '431' => Net::HTTPRequestHeaderFieldsTooLarge, + '451' => Net::HTTPUnavailableForLegalReasons, + + '500' => Net::HTTPInternalServerError, + '501' => Net::HTTPNotImplemented, + '502' => Net::HTTPBadGateway, + '503' => Net::HTTPServiceUnavailable, + '504' => Net::HTTPGatewayTimeout, + '505' => Net::HTTPVersionNotSupported, + '506' => Net::HTTPVariantAlsoNegotiates, + '507' => Net::HTTPInsufficientStorage, + '508' => Net::HTTPLoopDetected, + '510' => Net::HTTPNotExtended, + '511' => Net::HTTPNetworkAuthenticationRequired, + } +end diff --git a/vendor/bundle/ruby/3.2.0/gems/net-http-0.6.0/lib/net/http/status.rb b/vendor/bundle/ruby/3.2.0/gems/net-http-0.6.0/lib/net/http/status.rb new file mode 100644 index 000000000..e70b47d9f --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/net-http-0.6.0/lib/net/http/status.rb @@ -0,0 +1,84 @@ +# frozen_string_literal: true + +require_relative '../http' + +if $0 == __FILE__ + require 'open-uri' + File.foreach(__FILE__) do |line| + puts line + break if line.start_with?('end') + end + puts + puts "Net::HTTP::STATUS_CODES = {" + url = "https://www.iana.org/assignments/http-status-codes/http-status-codes-1.csv" + URI(url).read.each_line do |line| + code, mes, = line.split(',') + next if ['(Unused)', 'Unassigned', 'Description'].include?(mes) + puts " #{code} => '#{mes}'," + end + puts "} # :nodoc:" +end + +Net::HTTP::STATUS_CODES = { + 100 => 'Continue', + 101 => 'Switching Protocols', + 102 => 'Processing', + 103 => 'Early Hints', + 200 => 'OK', + 201 => 'Created', + 202 => 'Accepted', + 203 => 'Non-Authoritative Information', + 
204 => 'No Content', + 205 => 'Reset Content', + 206 => 'Partial Content', + 207 => 'Multi-Status', + 208 => 'Already Reported', + 226 => 'IM Used', + 300 => 'Multiple Choices', + 301 => 'Moved Permanently', + 302 => 'Found', + 303 => 'See Other', + 304 => 'Not Modified', + 305 => 'Use Proxy', + 307 => 'Temporary Redirect', + 308 => 'Permanent Redirect', + 400 => 'Bad Request', + 401 => 'Unauthorized', + 402 => 'Payment Required', + 403 => 'Forbidden', + 404 => 'Not Found', + 405 => 'Method Not Allowed', + 406 => 'Not Acceptable', + 407 => 'Proxy Authentication Required', + 408 => 'Request Timeout', + 409 => 'Conflict', + 410 => 'Gone', + 411 => 'Length Required', + 412 => 'Precondition Failed', + 413 => 'Content Too Large', + 414 => 'URI Too Long', + 415 => 'Unsupported Media Type', + 416 => 'Range Not Satisfiable', + 417 => 'Expectation Failed', + 421 => 'Misdirected Request', + 422 => 'Unprocessable Content', + 423 => 'Locked', + 424 => 'Failed Dependency', + 425 => 'Too Early', + 426 => 'Upgrade Required', + 428 => 'Precondition Required', + 429 => 'Too Many Requests', + 431 => 'Request Header Fields Too Large', + 451 => 'Unavailable For Legal Reasons', + 500 => 'Internal Server Error', + 501 => 'Not Implemented', + 502 => 'Bad Gateway', + 503 => 'Service Unavailable', + 504 => 'Gateway Timeout', + 505 => 'HTTP Version Not Supported', + 506 => 'Variant Also Negotiates', + 507 => 'Insufficient Storage', + 508 => 'Loop Detected', + 510 => 'Not Extended (OBSOLETED)', + 511 => 'Network Authentication Required', +} # :nodoc: diff --git a/vendor/bundle/ruby/3.2.0/gems/net-http-0.6.0/lib/net/https.rb b/vendor/bundle/ruby/3.2.0/gems/net-http-0.6.0/lib/net/https.rb new file mode 100644 index 000000000..0f23e1fb1 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/net-http-0.6.0/lib/net/https.rb @@ -0,0 +1,23 @@ +# frozen_string_literal: true +=begin + += net/https -- SSL/TLS enhancement for Net::HTTP. + + This file has been merged with net/http. There is no longer any need to + require 'net/https' to use HTTPS. + + See Net::HTTP for details on how to make HTTPS connections. + +== Info + 'OpenSSL for Ruby 2' project + Copyright (C) 2001 GOTOU Yuuzou + All rights reserved. + +== Licence + This program is licensed under the same licence as Ruby. + (See the file 'LICENCE'.) 
+ +=end + +require_relative 'http' +require 'openssl' diff --git a/vendor/bundle/ruby/3.2.0/gems/net-http-0.6.0/net-http.gemspec b/vendor/bundle/ruby/3.2.0/gems/net-http-0.6.0/net-http.gemspec new file mode 100644 index 000000000..002113679 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/net-http-0.6.0/net-http.gemspec @@ -0,0 +1,39 @@ +# frozen_string_literal: true + +name = File.basename(__FILE__, ".gemspec") +version = ["lib", Array.new(name.count("-")+1, "..").join("/")].find do |dir| + file = File.join(__dir__, dir, "#{name.tr('-', '/')}.rb") + begin + break File.foreach(file, mode: "rb") do |line| + /^\s*VERSION\s*=\s*"(.*)"/ =~ line and break $1 + end + rescue SystemCallError + next + end +end + +Gem::Specification.new do |spec| + spec.name = name + spec.version = version + spec.authors = ["NARUSE, Yui"] + spec.email = ["naruse@airemix.jp"] + + spec.summary = %q{HTTP client api for Ruby.} + spec.description = %q{HTTP client api for Ruby.} + spec.homepage = "https://github.com/ruby/net-http" + spec.required_ruby_version = Gem::Requirement.new(">= 2.6.0") + spec.licenses = ["Ruby", "BSD-2-Clause"] + + spec.metadata["homepage_uri"] = spec.homepage + spec.metadata["source_code_uri"] = spec.homepage + + # Specify which files should be added to the gem when it is released. + # The `git ls-files -z` loads the files in the RubyGem that have been added into git. + spec.files = Dir.chdir(File.expand_path('..', __FILE__)) do + `git ls-files -z 2>#{IO::NULL}`.split("\x0").reject { |f| f.match(%r{\A(?:(?:test|spec|features)/|\.git)}) } + end + spec.bindir = "exe" + spec.require_paths = ["lib"] + + spec.add_dependency "uri" +end diff --git a/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/Gemfile b/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/Gemfile new file mode 100644 index 000000000..affe3ac21 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/Gemfile @@ -0,0 +1,40 @@ +# frozen_string_literal: true + +source "https://rubygems.org" + +gemspec + +group :development do + # bootstrapping + gem "bundler", "~> 2.3" + gem "rake", "13.2.1" + + # building extensions + gem "rake-compiler", "1.2.8" + gem "rake-compiler-dock", "1.7.0" + + # parser generator + gem "rexical", "1.0.8" + + # tests + gem "minitest", "5.25.4" + gem "minitest-parallel_fork", "2.0.0" + gem "ruby_memcheck", "3.0.0" + gem "rubyzip", "~> 2.3.2" + gem "simplecov", "= 0.21.2" + + # rubocop + unless RUBY_PLATFORM == "java" + gem "standard", "1.43.0" + gem "rubocop-minitest", "0.36.0" + gem "rubocop-packaging", "0.5.2" + gem "rubocop-rake", "0.6.0" + end +end + +# If Psych doesn't build, you can disable this group locally by running +# `bundle config set --local without rdoc` +# Then re-run `bundle install`. +group :rdoc do + gem "rdoc", "6.10.0" unless RUBY_PLATFORM == "java" || ENV["CI"] +end diff --git a/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/LICENSE-DEPENDENCIES.md b/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/LICENSE-DEPENDENCIES.md new file mode 100644 index 000000000..1e950b6e3 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/LICENSE-DEPENDENCIES.md @@ -0,0 +1,2224 @@ +# Vendored Dependency Licenses + +Nokogiri ships with some third party dependencies, which are listed here along with their licenses. + +Note that this document is broken into multiple sections, each of which describes the dependencies of a different "platform release" of Nokogiri. 
+ + + + + +- [Platform Releases](#platform-releases) + * [Default platform release ("ruby")](#default-platform-release-ruby) + * [Native LinuxⓇ platform releases ("x86_64-linux", "aarch64-linux", and "arm-linux")](#native-linux%E2%93%A1-platform-releases-x86_64-linux-aarch64-linux-and-arm-linux) + * [Native Darwin (macOSⓇ) platform releases ("x86_64-darwin" and "arm64-darwin")](#native-darwin-macos%E2%93%A1-platform-releases-x86_64-darwin-and-arm64-darwin) + * [Native WindowsⓇ platform releases ("x64-mingw-ucrt")](#native-windows%E2%93%A1-platform-releases-x64-mingw-ucrt) + * [JavaⓇ (JRuby) platform release ("java")](#java%E2%93%A1-jruby-platform-release-java) +- [Appendix: Dependencies' License Texts](#appendix-dependencies-license-texts) + * [libgumbo](#libgumbo) + * [libxml2](#libxml2) + * [libxslt](#libxslt) + * [zlib](#zlib) + * [libiconv](#libiconv) + * [isorelax:isorelax](#isorelaxisorelax) + * [net.sf.saxon:Saxon-HE](#netsfsaxonsaxon-he) + * [net.sourceforge.htmlunit:neko-htmlunit](#netsourceforgehtmlunitneko-htmlunit) + * [nu.validator:jing](#nuvalidatorjing) + * [org.nokogiri:nekodtd](#orgnokogirinekodtd) + * [xalan:serializer and xalan:xalan](#xalanserializer-and-xalanxalan) + * [xerces:xercesImpl](#xercesxercesimpl) + * [xml-apis:xml-apis](#xml-apisxml-apis) + + + +Anyone consuming this file via license-tracking software should endeavor to understand which gem file you're downloading and using, so as not to misinterpret the contents of this file and the licenses of the software being distributed. + +You can double-check the dependencies in your gem file by examining the output of `nokogiri -v` after installation, which will emit the complete set of libraries in use (for versions `>= 1.11.0.rc4`). + +In particular, I'm sure somebody's lawyer, somewhere, is going to freak out that the LGPL appears in this file; and so I'd like to take special note that the dependency covered by LGPL, `libiconv`, is only being redistributed in the native Windows and native Darwin platform releases. It's not present in default, JavaⓇ, or native LinuxⓇ releases. + + +## Platform Releases + +### Default platform release ("ruby") + +The default platform release distributes the following dependencies in source form: + +* [libxml2](#libxml2) +* [libxslt](#libxslt) +* [libgumbo](#libgumbo) + +This distribution can be identified by inspecting the included Gem::Specification, which will have the value "ruby" for its "platform" attribute. + + +### Native LinuxⓇ platform releases ("x86_64-linux", "aarch64-linux", and "arm-linux") + +The native LinuxⓇ platform release distributes the following dependencies in source form: + +* [libxml2](#libxml2) +* [libxslt](#libxslt) +* [libgumbo](#libgumbo) +* [zlib](#zlib) + +This distribution can be identified by inspecting the included Gem::Specification, which will have a value similar to "x86_64-linux" or "aarch64-linux" for its "platform.cpu" attribute. + + +### Native Darwin (macOSⓇ) platform releases ("x86_64-darwin" and "arm64-darwin") + +The native Darwin platform release distributes the following dependencies in source form: + +* [libxml2](#libxml2) +* [libxslt](#libxslt) +* [libgumbo](#libgumbo) +* [zlib](#zlib) +* [libiconv](#libiconv) + +This distribution can be identified by inspecting the included Gem::Specification, which will have a value similar to "x86_64-darwin" or "arm64-darwin" for its "platform.cpu" attribute. Darwin is also known more familiarly as "OSX" or "macOSⓇ" and is the operating system for many AppleⓇ computers. 
+ + +### Native WindowsⓇ platform releases ("x64-mingw-ucrt") + +The native WindowsⓇ platform release distributes the following dependencies in source form: + +* [libxml2](#libxml2) +* [libxslt](#libxslt) +* [libgumbo](#libgumbo) +* [zlib](#zlib) +* [libiconv](#libiconv) + +This distribution can be identified by inspecting the included Gem::Specification, which will have a value similar to "x64-mingw-ucrt" for its "platform.cpu" attribute. + + +### JavaⓇ (JRuby) platform release ("java") + +The Java platform release distributes the following dependencies as compiled jar files: + +* [isorelax:isorelax](#isorelaxisorelax) +* [net.sf.saxon:Saxon-HE](#netsfsaxonsaxon-he) +* [net.sourceforge.htmlunit:neko-htmlunit](#netsourceforgehtmlunitneko-htmlunit) +* [nu.validator:jing](#nuvalidatorjing) +* [org.nokogiri:nekodtd](#orgnokogirinekodtd) +* [xalan:serializer and xalan:xalan](#xalanserializer-and-xalanxalan) +* [xerces:xercesImpl](#xercesxercesimpl) +* [xml-apis:xml-apis](#xml-apisxml-apis) + +This distribution can be identified by inspecting the included Gem::Specification, which will have the value "java" for its "platform.os" attribute. + + +## Appendix: Dependencies' License Texts + +This section contains a subsection for each potentially-distributed dependency, which includes the name of the license and the license text. + +Please see previous sections to understand which of these potential dependencies is actually distributed in the gem file you're downloading and using. + + +### libgumbo + +Apache 2.0 + +https://github.com/sparklemotion/nokogiri/blob/main/gumbo-parser/src/README.md + + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). 
+ + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + +### libxml2 + +MIT + +http://xmlsoft.org/ + + Except where otherwise noted in the source code (e.g. the files hash.c, + list.c and the trio files, which are covered by a similar licence but + with different Copyright notices) all the files are: + + Copyright (C) 1998-2012 Daniel Veillard. All Rights Reserved. + + Permission is hereby granted, free of charge, to any person obtaining a copy + of this software and associated documentation files (the "Software"), to deal + in the Software without restriction, including without limitation the rights + to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + copies of the Software, and to permit persons to whom the Software is fur- + nished to do so, subject to the following conditions: + + The above copyright notice and this permission notice shall be included in + all copies or substantial portions of the Software. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FIT- + NESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN + THE SOFTWARE. + + +### libxslt + +MIT + +http://xmlsoft.org/libxslt/ + + Licence for libxslt except libexslt + ---------------------------------------------------------------------- + Copyright (C) 2001-2002 Daniel Veillard. All Rights Reserved. 
+ + Permission is hereby granted, free of charge, to any person obtaining a copy + of this software and associated documentation files (the "Software"), to deal + in the Software without restriction, including without limitation the rights + to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + copies of the Software, and to permit persons to whom the Software is fur- + nished to do so, subject to the following conditions: + + The above copyright notice and this permission notice shall be included in + all copies or substantial portions of the Software. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FIT- + NESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + DANIEL VEILLARD BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER + IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CON- + NECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + + Except as contained in this notice, the name of Daniel Veillard shall not + be used in advertising or otherwise to promote the sale, use or other deal- + ings in this Software without prior written authorization from him. + + ---------------------------------------------------------------------- + + Licence for libexslt + ---------------------------------------------------------------------- + Copyright (C) 2001-2002 Thomas Broyer, Charlie Bozeman and Daniel Veillard. + All Rights Reserved. + + Permission is hereby granted, free of charge, to any person obtaining a copy + of this software and associated documentation files (the "Software"), to deal + in the Software without restriction, including without limitation the rights + to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + copies of the Software, and to permit persons to whom the Software is fur- + nished to do so, subject to the following conditions: + + The above copyright notice and this permission notice shall be included in + all copies or substantial portions of the Software. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FIT- + NESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + AUTHORS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER + IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CON- + NECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + + Except as contained in this notice, the name of the authors shall not + be used in advertising or otherwise to promote the sale, use or other deal- + ings in this Software without prior written authorization from him. + ---------------------------------------------------------------------- + + +### zlib + +zlib license + +http://www.zlib.net/zlib_license.html + + Copyright (C) 1995-2017 Jean-loup Gailly and Mark Adler + + This software is provided 'as-is', without any express or implied + warranty. In no event will the authors be held liable for any damages + arising from the use of this software. + + Permission is granted to anyone to use this software for any purpose, + including commercial applications, and to alter it and redistribute it + freely, subject to the following restrictions: + + 1. The origin of this software must not be misrepresented; you must not + claim that you wrote the original software. 
If you use this software + in a product, an acknowledgment in the product documentation would be + appreciated but is not required. + 2. Altered source versions must be plainly marked as such, and must not be + misrepresented as being the original software. + 3. This notice may not be removed or altered from any source distribution. + + Jean-loup Gailly Mark Adler + jloup@gzip.org madler@alumni.caltech.edu + + +### libiconv + +LGPL + +https://www.gnu.org/software/libiconv/ + + GNU LIBRARY GENERAL PUBLIC LICENSE + Version 2, June 1991 + + Copyright (C) 1991 Free Software Foundation, Inc. + 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA + Everyone is permitted to copy and distribute verbatim copies + of this license document, but changing it is not allowed. + + [This is the first released version of the library GPL. It is + numbered 2 because it goes with version 2 of the ordinary GPL.] + + Preamble + + The licenses for most software are designed to take away your + freedom to share and change it. By contrast, the GNU General Public + Licenses are intended to guarantee your freedom to share and change + free software--to make sure the software is free for all its users. + + This license, the Library General Public License, applies to some + specially designated Free Software Foundation software, and to any + other libraries whose authors decide to use it. You can use it for + your libraries, too. + + When we speak of free software, we are referring to freedom, not + price. Our General Public Licenses are designed to make sure that you + have the freedom to distribute copies of free software (and charge for + this service if you wish), that you receive source code or can get it + if you want it, that you can change the software or use pieces of it + in new free programs; and that you know you can do these things. + + To protect your rights, we need to make restrictions that forbid + anyone to deny you these rights or to ask you to surrender the rights. + These restrictions translate to certain responsibilities for you if + you distribute copies of the library, or if you modify it. + + For example, if you distribute copies of the library, whether gratis + or for a fee, you must give the recipients all the rights that we gave + you. You must make sure that they, too, receive or can get the source + code. If you link a program with the library, you must provide + complete object files to the recipients so that they can relink them + with the library, after making changes to the library and recompiling + it. And you must show them these terms so they know their rights. + + Our method of protecting your rights has two steps: (1) copyright + the library, and (2) offer you this license which gives you legal + permission to copy, distribute and/or modify the library. + + Also, for each distributor's protection, we want to make certain + that everyone understands that there is no warranty for this free + library. If the library is modified by someone else and passed on, we + want its recipients to know that what they have is not the original + version, so that any problems introduced by others will not reflect on + the original authors' reputations. + + Finally, any free program is threatened constantly by software + patents. We wish to avoid the danger that companies distributing free + software will individually obtain patent licenses, thus in effect + transforming the program into proprietary software. 
To prevent this, + we have made it clear that any patent must be licensed for everyone's + free use or not licensed at all. + + Most GNU software, including some libraries, is covered by the ordinary + GNU General Public License, which was designed for utility programs. This + license, the GNU Library General Public License, applies to certain + designated libraries. This license is quite different from the ordinary + one; be sure to read it in full, and don't assume that anything in it is + the same as in the ordinary license. + + The reason we have a separate public license for some libraries is that + they blur the distinction we usually make between modifying or adding to a + program and simply using it. Linking a program with a library, without + changing the library, is in some sense simply using the library, and is + analogous to running a utility program or application program. However, in + a textual and legal sense, the linked executable is a combined work, a + derivative of the original library, and the ordinary General Public License + treats it as such. + + Because of this blurred distinction, using the ordinary General + Public License for libraries did not effectively promote software + sharing, because most developers did not use the libraries. We + concluded that weaker conditions might promote sharing better. + + However, unrestricted linking of non-free programs would deprive the + users of those programs of all benefit from the free status of the + libraries themselves. This Library General Public License is intended to + permit developers of non-free programs to use free libraries, while + preserving your freedom as a user of such programs to change the free + libraries that are incorporated in them. (We have not seen how to achieve + this as regards changes in header files, but we have achieved it as regards + changes in the actual functions of the Library.) The hope is that this + will lead to faster development of free libraries. + + The precise terms and conditions for copying, distribution and + modification follow. Pay close attention to the difference between a + "work based on the library" and a "work that uses the library". The + former contains code derived from the library, while the latter only + works together with the library. + + Note that it is possible for a library to be covered by the ordinary + General Public License rather than by this special one. + + GNU LIBRARY GENERAL PUBLIC LICENSE + TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION + + 0. This License Agreement applies to any software library which + contains a notice placed by the copyright holder or other authorized + party saying it may be distributed under the terms of this Library + General Public License (also called "this License"). Each licensee is + addressed as "you". + + A "library" means a collection of software functions and/or data + prepared so as to be conveniently linked with application programs + (which use some of those functions and data) to form executables. + + The "Library", below, refers to any such software library or work + which has been distributed under these terms. A "work based on the + Library" means either the Library or any derivative work under + copyright law: that is to say, a work containing the Library or a + portion of it, either verbatim or with modifications and/or translated + straightforwardly into another language. (Hereinafter, translation is + included without limitation in the term "modification".) 
+ + "Source code" for a work means the preferred form of the work for + making modifications to it. For a library, complete source code means + all the source code for all modules it contains, plus any associated + interface definition files, plus the scripts used to control compilation + and installation of the library. + + Activities other than copying, distribution and modification are not + covered by this License; they are outside its scope. The act of + running a program using the Library is not restricted, and output from + such a program is covered only if its contents constitute a work based + on the Library (independent of the use of the Library in a tool for + writing it). Whether that is true depends on what the Library does + and what the program that uses the Library does. + + 1. You may copy and distribute verbatim copies of the Library's + complete source code as you receive it, in any medium, provided that + you conspicuously and appropriately publish on each copy an + appropriate copyright notice and disclaimer of warranty; keep intact + all the notices that refer to this License and to the absence of any + warranty; and distribute a copy of this License along with the + Library. + + You may charge a fee for the physical act of transferring a copy, + and you may at your option offer warranty protection in exchange for a + fee. + + 2. You may modify your copy or copies of the Library or any portion + of it, thus forming a work based on the Library, and copy and + distribute such modifications or work under the terms of Section 1 + above, provided that you also meet all of these conditions: + + a) The modified work must itself be a software library. + + b) You must cause the files modified to carry prominent notices + stating that you changed the files and the date of any change. + + c) You must cause the whole of the work to be licensed at no + charge to all third parties under the terms of this License. + + d) If a facility in the modified Library refers to a function or a + table of data to be supplied by an application program that uses + the facility, other than as an argument passed when the facility + is invoked, then you must make a good faith effort to ensure that, + in the event an application does not supply such function or + table, the facility still operates, and performs whatever part of + its purpose remains meaningful. + + (For example, a function in a library to compute square roots has + a purpose that is entirely well-defined independent of the + application. Therefore, Subsection 2d requires that any + application-supplied function or table used by this function must + be optional: if the application does not supply it, the square + root function must still compute square roots.) + + These requirements apply to the modified work as a whole. If + identifiable sections of that work are not derived from the Library, + and can be reasonably considered independent and separate works in + themselves, then this License, and its terms, do not apply to those + sections when you distribute them as separate works. But when you + distribute the same sections as part of a whole which is a work based + on the Library, the distribution of the whole must be on the terms of + this License, whose permissions for other licensees extend to the + entire whole, and thus to each and every part regardless of who wrote + it. 
+ + Thus, it is not the intent of this section to claim rights or contest + your rights to work written entirely by you; rather, the intent is to + exercise the right to control the distribution of derivative or + collective works based on the Library. + + In addition, mere aggregation of another work not based on the Library + with the Library (or with a work based on the Library) on a volume of + a storage or distribution medium does not bring the other work under + the scope of this License. + + 3. You may opt to apply the terms of the ordinary GNU General Public + License instead of this License to a given copy of the Library. To do + this, you must alter all the notices that refer to this License, so + that they refer to the ordinary GNU General Public License, version 2, + instead of to this License. (If a newer version than version 2 of the + ordinary GNU General Public License has appeared, then you can specify + that version instead if you wish.) Do not make any other change in + these notices. + + Once this change is made in a given copy, it is irreversible for + that copy, so the ordinary GNU General Public License applies to all + subsequent copies and derivative works made from that copy. + + This option is useful when you wish to copy part of the code of + the Library into a program that is not a library. + + 4. You may copy and distribute the Library (or a portion or + derivative of it, under Section 2) in object code or executable form + under the terms of Sections 1 and 2 above provided that you accompany + it with the complete corresponding machine-readable source code, which + must be distributed under the terms of Sections 1 and 2 above on a + medium customarily used for software interchange. + + If distribution of object code is made by offering access to copy + from a designated place, then offering equivalent access to copy the + source code from the same place satisfies the requirement to + distribute the source code, even though third parties are not + compelled to copy the source along with the object code. + + 5. A program that contains no derivative of any portion of the + Library, but is designed to work with the Library by being compiled or + linked with it, is called a "work that uses the Library". Such a + work, in isolation, is not a derivative work of the Library, and + therefore falls outside the scope of this License. + + However, linking a "work that uses the Library" with the Library + creates an executable that is a derivative of the Library (because it + contains portions of the Library), rather than a "work that uses the + library". The executable is therefore covered by this License. + Section 6 states terms for distribution of such executables. + + When a "work that uses the Library" uses material from a header file + that is part of the Library, the object code for the work may be a + derivative work of the Library even though the source code is not. + Whether this is true is especially significant if the work can be + linked without the Library, or if the work is itself a library. The + threshold for this to be true is not precisely defined by law. + + If such an object file uses only numerical parameters, data + structure layouts and accessors, and small macros and small inline + functions (ten lines or less in length), then the use of the object + file is unrestricted, regardless of whether it is legally a derivative + work. (Executables containing this object code plus portions of the + Library will still fall under Section 6.) 
+ + Otherwise, if the work is a derivative of the Library, you may + distribute the object code for the work under the terms of Section 6. + Any executables containing that work also fall under Section 6, + whether or not they are linked directly with the Library itself. + + 6. As an exception to the Sections above, you may also compile or + link a "work that uses the Library" with the Library to produce a + work containing portions of the Library, and distribute that work + under terms of your choice, provided that the terms permit + modification of the work for the customer's own use and reverse + engineering for debugging such modifications. + + You must give prominent notice with each copy of the work that the + Library is used in it and that the Library and its use are covered by + this License. You must supply a copy of this License. If the work + during execution displays copyright notices, you must include the + copyright notice for the Library among them, as well as a reference + directing the user to the copy of this License. Also, you must do one + of these things: + + a) Accompany the work with the complete corresponding + machine-readable source code for the Library including whatever + changes were used in the work (which must be distributed under + Sections 1 and 2 above); and, if the work is an executable linked + with the Library, with the complete machine-readable "work that + uses the Library", as object code and/or source code, so that the + user can modify the Library and then relink to produce a modified + executable containing the modified Library. (It is understood + that the user who changes the contents of definitions files in the + Library will not necessarily be able to recompile the application + to use the modified definitions.) + + b) Accompany the work with a written offer, valid for at + least three years, to give the same user the materials + specified in Subsection 6a, above, for a charge no more + than the cost of performing this distribution. + + c) If distribution of the work is made by offering access to copy + from a designated place, offer equivalent access to copy the above + specified materials from the same place. + + d) Verify that the user has already received a copy of these + materials or that you have already sent this user a copy. + + For an executable, the required form of the "work that uses the + Library" must include any data and utility programs needed for + reproducing the executable from it. However, as a special exception, + the source code distributed need not include anything that is normally + distributed (in either source or binary form) with the major + components (compiler, kernel, and so on) of the operating system on + which the executable runs, unless that component itself accompanies + the executable. + + It may happen that this requirement contradicts the license + restrictions of other proprietary libraries that do not normally + accompany the operating system. Such a contradiction means you cannot + use both them and the Library together in an executable that you + distribute. + + 7. 
You may place library facilities that are a work based on the + Library side-by-side in a single library together with other library + facilities not covered by this License, and distribute such a combined + library, provided that the separate distribution of the work based on + the Library and of the other library facilities is otherwise + permitted, and provided that you do these two things: + + a) Accompany the combined library with a copy of the same work + based on the Library, uncombined with any other library + facilities. This must be distributed under the terms of the + Sections above. + + b) Give prominent notice with the combined library of the fact + that part of it is a work based on the Library, and explaining + where to find the accompanying uncombined form of the same work. + + 8. You may not copy, modify, sublicense, link with, or distribute + the Library except as expressly provided under this License. Any + attempt otherwise to copy, modify, sublicense, link with, or + distribute the Library is void, and will automatically terminate your + rights under this License. However, parties who have received copies, + or rights, from you under this License will not have their licenses + terminated so long as such parties remain in full compliance. + + 9. You are not required to accept this License, since you have not + signed it. However, nothing else grants you permission to modify or + distribute the Library or its derivative works. These actions are + prohibited by law if you do not accept this License. Therefore, by + modifying or distributing the Library (or any work based on the + Library), you indicate your acceptance of this License to do so, and + all its terms and conditions for copying, distributing or modifying + the Library or works based on it. + + 10. Each time you redistribute the Library (or any work based on the + Library), the recipient automatically receives a license from the + original licensor to copy, distribute, link with or modify the Library + subject to these terms and conditions. You may not impose any further + restrictions on the recipients' exercise of the rights granted herein. + You are not responsible for enforcing compliance by third parties to + this License. + + 11. If, as a consequence of a court judgment or allegation of patent + infringement or for any other reason (not limited to patent issues), + conditions are imposed on you (whether by court order, agreement or + otherwise) that contradict the conditions of this License, they do not + excuse you from the conditions of this License. If you cannot + distribute so as to satisfy simultaneously your obligations under this + License and any other pertinent obligations, then as a consequence you + may not distribute the Library at all. For example, if a patent + license would not permit royalty-free redistribution of the Library by + all those who receive copies directly or indirectly through you, then + the only way you could satisfy both it and this License would be to + refrain entirely from distribution of the Library. + + If any portion of this section is held invalid or unenforceable under any + particular circumstance, the balance of the section is intended to apply, + and the section as a whole is intended to apply in other circumstances. 
+ + It is not the purpose of this section to induce you to infringe any + patents or other property right claims or to contest validity of any + such claims; this section has the sole purpose of protecting the + integrity of the free software distribution system which is + implemented by public license practices. Many people have made + generous contributions to the wide range of software distributed + through that system in reliance on consistent application of that + system; it is up to the author/donor to decide if he or she is willing + to distribute software through any other system and a licensee cannot + impose that choice. + + This section is intended to make thoroughly clear what is believed to + be a consequence of the rest of this License. + + 12. If the distribution and/or use of the Library is restricted in + certain countries either by patents or by copyrighted interfaces, the + original copyright holder who places the Library under this License may add + an explicit geographical distribution limitation excluding those countries, + so that distribution is permitted only in or among countries not thus + excluded. In such case, this License incorporates the limitation as if + written in the body of this License. + + 13. The Free Software Foundation may publish revised and/or new + versions of the Library General Public License from time to time. + Such new versions will be similar in spirit to the present version, + but may differ in detail to address new problems or concerns. + + Each version is given a distinguishing version number. If the Library + specifies a version number of this License which applies to it and + "any later version", you have the option of following the terms and + conditions either of that version or of any later version published by + the Free Software Foundation. If the Library does not specify a + license version number, you may choose any version ever published by + the Free Software Foundation. + + 14. If you wish to incorporate parts of the Library into other free + programs whose distribution conditions are incompatible with these, + write to the author to ask for permission. For software which is + copyrighted by the Free Software Foundation, write to the Free + Software Foundation; we sometimes make exceptions for this. Our + decision will be guided by the two goals of preserving the free status + of all derivatives of our free software and of promoting the sharing + and reuse of software generally. + + NO WARRANTY + + 15. BECAUSE THE LIBRARY IS LICENSED FREE OF CHARGE, THERE IS NO + WARRANTY FOR THE LIBRARY, TO THE EXTENT PERMITTED BY APPLICABLE LAW. + EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR + OTHER PARTIES PROVIDE THE LIBRARY "AS IS" WITHOUT WARRANTY OF ANY + KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE + IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR + PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE + LIBRARY IS WITH YOU. SHOULD THE LIBRARY PROVE DEFECTIVE, YOU ASSUME + THE COST OF ALL NECESSARY SERVICING, REPAIR OR CORRECTION. + + 16. 
IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN + WRITING WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY + AND/OR REDISTRIBUTE THE LIBRARY AS PERMITTED ABOVE, BE LIABLE TO YOU + FOR DAMAGES, INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR + CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OR INABILITY TO USE THE + LIBRARY (INCLUDING BUT NOT LIMITED TO LOSS OF DATA OR DATA BEING + RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD PARTIES OR A + FAILURE OF THE LIBRARY TO OPERATE WITH ANY OTHER SOFTWARE), EVEN IF + SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH + DAMAGES. + + END OF TERMS AND CONDITIONS + + +### isorelax:isorelax + +MIT + +http://iso-relax.sourceforge.net/ + + Copyright (c) 2001-2002, SourceForge ISO-RELAX Project (ASAMI + Tomoharu, Daisuke Okajima, Kohsuke Kawaguchi, and MURATA Makoto) + + Permission is hereby granted, free of charge, to any person obtaining + a copy of this software and associated documentation files (the + "Software"), to deal in the Software without restriction, including + without limitation the rights to use, copy, modify, merge, publish, + distribute, sublicense, and/or sell copies of the Software, and to + permit persons to whom the Software is furnished to do so, subject to + the following conditions: + + The above copyright notice and this permission notice shall be + included in all copies or substantial portions of the Software. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, + EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF + MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND + NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE + LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION + OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION + WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + + +### net.sf.saxon:Saxon-HE + +MPL 2.0 + +http://www.saxonica.com/ + + Mozilla Public License Version 2.0 + ================================== + + 1. Definitions + -------------- + + 1.1. "Contributor" + means each individual or legal entity that creates, contributes to + the creation of, or owns Covered Software. + + 1.2. "Contributor Version" + means the combination of the Contributions of others (if any) used + by a Contributor and that particular Contributor's Contribution. + + 1.3. "Contribution" + means Covered Software of a particular Contributor. + + 1.4. "Covered Software" + means Source Code Form to which the initial Contributor has attached + the notice in Exhibit A, the Executable Form of such Source Code + Form, and Modifications of such Source Code Form, in each case + including portions thereof. + + 1.5. "Incompatible With Secondary Licenses" + means + + (a) that the initial Contributor has attached the notice described + in Exhibit B to the Covered Software; or + + (b) that the Covered Software was made available under the terms of + version 1.1 or earlier of the License, but not also under the + terms of a Secondary License. + + 1.6. "Executable Form" + means any form of the work other than Source Code Form. + + 1.7. "Larger Work" + means a work that combines Covered Software with other material, in + a separate file or files, that is not Covered Software. + + 1.8. "License" + means this document. + + 1.9. 
"Licensable" + means having the right to grant, to the maximum extent possible, + whether at the time of the initial grant or subsequently, any and + all of the rights conveyed by this License. + + 1.10. "Modifications" + means any of the following: + + (a) any file in Source Code Form that results from an addition to, + deletion from, or modification of the contents of Covered + Software; or + + (b) any new file in Source Code Form that contains any Covered + Software. + + 1.11. "Patent Claims" of a Contributor + means any patent claim(s), including without limitation, method, + process, and apparatus claims, in any patent Licensable by such + Contributor that would be infringed, but for the grant of the + License, by the making, using, selling, offering for sale, having + made, import, or transfer of either its Contributions or its + Contributor Version. + + 1.12. "Secondary License" + means either the GNU General Public License, Version 2.0, the GNU + Lesser General Public License, Version 2.1, the GNU Affero General + Public License, Version 3.0, or any later versions of those + licenses. + + 1.13. "Source Code Form" + means the form of the work preferred for making modifications. + + 1.14. "You" (or "Your") + means an individual or a legal entity exercising rights under this + License. For legal entities, "You" includes any entity that + controls, is controlled by, or is under common control with You. For + purposes of this definition, "control" means (a) the power, direct + or indirect, to cause the direction or management of such entity, + whether by contract or otherwise, or (b) ownership of more than + fifty percent (50%) of the outstanding shares or beneficial + ownership of such entity. + + 2. License Grants and Conditions + -------------------------------- + + 2.1. Grants + + Each Contributor hereby grants You a world-wide, royalty-free, + non-exclusive license: + + (a) under intellectual property rights (other than patent or trademark) + Licensable by such Contributor to use, reproduce, make available, + modify, display, perform, distribute, and otherwise exploit its + Contributions, either on an unmodified basis, with Modifications, or + as part of a Larger Work; and + + (b) under Patent Claims of such Contributor to make, use, sell, offer + for sale, have made, import, and otherwise transfer either its + Contributions or its Contributor Version. + + 2.2. Effective Date + + The licenses granted in Section 2.1 with respect to any Contribution + become effective for each Contribution on the date the Contributor first + distributes such Contribution. + + 2.3. Limitations on Grant Scope + + The licenses granted in this Section 2 are the only rights granted under + this License. No additional rights or licenses will be implied from the + distribution or licensing of Covered Software under this License. + Notwithstanding Section 2.1(b) above, no patent license is granted by a + Contributor: + + (a) for any code that a Contributor has removed from Covered Software; + or + + (b) for infringements caused by: (i) Your and any other third party's + modifications of Covered Software, or (ii) the combination of its + Contributions with other software (except as part of its Contributor + Version); or + + (c) under Patent Claims infringed by Covered Software in the absence of + its Contributions. + + This License does not grant any rights in the trademarks, service marks, + or logos of any Contributor (except as may be necessary to comply with + the notice requirements in Section 3.4). 
+ + 2.4. Subsequent Licenses + + No Contributor makes additional grants as a result of Your choice to + distribute the Covered Software under a subsequent version of this + License (see Section 10.2) or under the terms of a Secondary License (if + permitted under the terms of Section 3.3). + + 2.5. Representation + + Each Contributor represents that the Contributor believes its + Contributions are its original creation(s) or it has sufficient rights + to grant the rights to its Contributions conveyed by this License. + + 2.6. Fair Use + + This License is not intended to limit any rights You have under + applicable copyright doctrines of fair use, fair dealing, or other + equivalents. + + 2.7. Conditions + + Sections 3.1, 3.2, 3.3, and 3.4 are conditions of the licenses granted + in Section 2.1. + + 3. Responsibilities + ------------------- + + 3.1. Distribution of Source Form + + All distribution of Covered Software in Source Code Form, including any + Modifications that You create or to which You contribute, must be under + the terms of this License. You must inform recipients that the Source + Code Form of the Covered Software is governed by the terms of this + License, and how they can obtain a copy of this License. You may not + attempt to alter or restrict the recipients' rights in the Source Code + Form. + + 3.2. Distribution of Executable Form + + If You distribute Covered Software in Executable Form then: + + (a) such Covered Software must also be made available in Source Code + Form, as described in Section 3.1, and You must inform recipients of + the Executable Form how they can obtain a copy of such Source Code + Form by reasonable means in a timely manner, at a charge no more + than the cost of distribution to the recipient; and + + (b) You may distribute such Executable Form under the terms of this + License, or sublicense it under different terms, provided that the + license for the Executable Form does not attempt to limit or alter + the recipients' rights in the Source Code Form under this License. + + 3.3. Distribution of a Larger Work + + You may create and distribute a Larger Work under terms of Your choice, + provided that You also comply with the requirements of this License for + the Covered Software. If the Larger Work is a combination of Covered + Software with a work governed by one or more Secondary Licenses, and the + Covered Software is not Incompatible With Secondary Licenses, this + License permits You to additionally distribute such Covered Software + under the terms of such Secondary License(s), so that the recipient of + the Larger Work may, at their option, further distribute the Covered + Software under the terms of either this License or such Secondary + License(s). + + 3.4. Notices + + You may not remove or alter the substance of any license notices + (including copyright notices, patent notices, disclaimers of warranty, + or limitations of liability) contained within the Source Code Form of + the Covered Software, except that You may alter any license notices to + the extent required to remedy known factual inaccuracies. + + 3.5. Application of Additional Terms + + You may choose to offer, and to charge a fee for, warranty, support, + indemnity or liability obligations to one or more recipients of Covered + Software. However, You may do so only on Your own behalf, and not on + behalf of any Contributor. 
You must make it absolutely clear that any + such warranty, support, indemnity, or liability obligation is offered by + You alone, and You hereby agree to indemnify every Contributor for any + liability incurred by such Contributor as a result of warranty, support, + indemnity or liability terms You offer. You may include additional + disclaimers of warranty and limitations of liability specific to any + jurisdiction. + + 4. Inability to Comply Due to Statute or Regulation + --------------------------------------------------- + + If it is impossible for You to comply with any of the terms of this + License with respect to some or all of the Covered Software due to + statute, judicial order, or regulation then You must: (a) comply with + the terms of this License to the maximum extent possible; and (b) + describe the limitations and the code they affect. Such description must + be placed in a text file included with all distributions of the Covered + Software under this License. Except to the extent prohibited by statute + or regulation, such description must be sufficiently detailed for a + recipient of ordinary skill to be able to understand it. + + 5. Termination + -------------- + + 5.1. The rights granted under this License will terminate automatically + if You fail to comply with any of its terms. However, if You become + compliant, then the rights granted under this License from a particular + Contributor are reinstated (a) provisionally, unless and until such + Contributor explicitly and finally terminates Your grants, and (b) on an + ongoing basis, if such Contributor fails to notify You of the + non-compliance by some reasonable means prior to 60 days after You have + come back into compliance. Moreover, Your grants from a particular + Contributor are reinstated on an ongoing basis if such Contributor + notifies You of the non-compliance by some reasonable means, this is the + first time You have received notice of non-compliance with this License + from such Contributor, and You become compliant prior to 30 days after + Your receipt of the notice. + + 5.2. If You initiate litigation against any entity by asserting a patent + infringement claim (excluding declaratory judgment actions, + counter-claims, and cross-claims) alleging that a Contributor Version + directly or indirectly infringes any patent, then the rights granted to + You by any and all Contributors for the Covered Software under Section + 2.1 of this License shall terminate. + + 5.3. In the event of termination under Sections 5.1 or 5.2 above, all + end user license agreements (excluding distributors and resellers) which + have been validly granted by You or Your distributors under this License + prior to termination shall survive termination. + + ************************************************************************ + * * + * 6. Disclaimer of Warranty * + * ------------------------- * + * * + * Covered Software is provided under this License on an "as is" * + * basis, without warranty of any kind, either expressed, implied, or * + * statutory, including, without limitation, warranties that the * + * Covered Software is free of defects, merchantable, fit for a * + * particular purpose or non-infringing. The entire risk as to the * + * quality and performance of the Covered Software is with You. * + * Should any Covered Software prove defective in any respect, You * + * (not any Contributor) assume the cost of any necessary servicing, * + * repair, or correction. 
This disclaimer of warranty constitutes an * + * essential part of this License. No use of any Covered Software is * + * authorized under this License except under this disclaimer. * + * * + ************************************************************************ + + ************************************************************************ + * * + * 7. Limitation of Liability * + * -------------------------- * + * * + * Under no circumstances and under no legal theory, whether tort * + * (including negligence), contract, or otherwise, shall any * + * Contributor, or anyone who distributes Covered Software as * + * permitted above, be liable to You for any direct, indirect, * + * special, incidental, or consequential damages of any character * + * including, without limitation, damages for lost profits, loss of * + * goodwill, work stoppage, computer failure or malfunction, or any * + * and all other commercial damages or losses, even if such party * + * shall have been informed of the possibility of such damages. This * + * limitation of liability shall not apply to liability for death or * + * personal injury resulting from such party's negligence to the * + * extent applicable law prohibits such limitation. Some * + * jurisdictions do not allow the exclusion or limitation of * + * incidental or consequential damages, so this exclusion and * + * limitation may not apply to You. * + * * + ************************************************************************ + + 8. Litigation + ------------- + + Any litigation relating to this License may be brought only in the + courts of a jurisdiction where the defendant maintains its principal + place of business and such litigation shall be governed by laws of that + jurisdiction, without reference to its conflict-of-law provisions. + Nothing in this Section shall prevent a party's ability to bring + cross-claims or counter-claims. + + 9. Miscellaneous + ---------------- + + This License represents the complete agreement concerning the subject + matter hereof. If any provision of this License is held to be + unenforceable, such provision shall be reformed only to the extent + necessary to make it enforceable. Any law or regulation which provides + that the language of a contract shall be construed against the drafter + shall not be used to construe this License against a Contributor. + + 10. Versions of the License + --------------------------- + + 10.1. New Versions + + Mozilla Foundation is the license steward. Except as provided in Section + 10.3, no one other than the license steward has the right to modify or + publish new versions of this License. Each version will be given a + distinguishing version number. + + 10.2. Effect of New Versions + + You may distribute the Covered Software under the terms of the version + of the License under which You originally received the Covered Software, + or under the terms of any subsequent version published by the license + steward. + + 10.3. Modified Versions + + If you create software not governed by this License, and you want to + create a new license for such software, you may create and use a + modified version of this License if you rename the license and remove + any references to the name of the license steward (except to note that + such modified license differs from this License). + + 10.4. 
Distributing Source Code Form that is Incompatible With Secondary + Licenses + + If You choose to distribute Source Code Form that is Incompatible With + Secondary Licenses under the terms of this version of the License, the + notice described in Exhibit B of this License must be attached. + + +### net.sourceforge.htmlunit:neko-htmlunit + +Apache 2.0 + +https://github.com/HtmlUnit/htmlunit-neko + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." 
+ + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. 
+ + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + +### nu.validator:jing + +BSD-3-Clause + +http://www.thaiopensource.com/relaxng/jing.html + + Copyright (c) 2001-2003 Thai Open Source Software Center Ltd + All rights reserved. 
+ + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions + are met: + + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + + * Redistributions in binary form must reproduce the above + copyright notice, this list of conditions and the following + disclaimer in the documentation and/or other materials provided + with the distribution. + + * Neither the name of the Thai Open Source Software Center Ltd nor + the names of its contributors may be used to endorse or promote + products derived from this software without specific prior + written permission. + + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND + CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, + INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF + MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE + DISCLAIMED. IN NO EVENT SHALL THE REGENTS OR CONTRIBUTORS BE + LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, + OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, + PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR + PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY + THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR + TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF + THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF + SUCH DAMAGE. + + +### org.nokogiri:nekodtd + +Apache 2.0 + +https://github.com/sparklemotion/nekodtd + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. 
For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + +### xalan:serializer and xalan:xalan + +Apache 2.0 + +https://xml.apache.org/xalan-j/ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. 
For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + +### xerces:xercesImpl + +Apache 2.0 + +https://xerces.apache.org/xerces2-j/ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. 
For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + +### xml-apis:xml-apis + +Apache 2.0 + +https://xerces.apache.org/xml-commons/ + + Unless otherwise noted all files in XML Commons are covered under the + Apache License Version 2.0. Please read the LICENSE and NOTICE files. + + XML Commons contains some software and documentation that is covered + under a number of different licenses. This applies particularly to the + xml-commons/java/external/ directory. Most files under + xml-commons/java/external/ are covered under their respective + LICENSE.*.txt files; see the matching README.*.txt files for + descriptions. + + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. 
+ + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS diff --git a/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/LICENSE.md b/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/LICENSE.md new file mode 100644 index 000000000..b649dd875 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/LICENSE.md @@ -0,0 +1,9 @@ +The MIT License + +Copyright 2008 -- 2023 by Mike Dalessio, Aaron Patterson, Yoko Harada, Akinori MUSHA, John Shahid, Karol Bucek, Sam Ruby, Craig Barnes, Stephen Checkoway, Lars Kanis, Sergio Arbeo, Timothy Elliott, Nobuyoshi Nakada, Charles Nutter, Patrick Mahoney. + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/README.md b/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/README.md new file mode 100644 index 000000000..df466f4a8 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/README.md @@ -0,0 +1,293 @@ +
+ +# Nokogiri + +Nokogiri (鋸) makes it easy and painless to work with XML and HTML from Ruby. It provides a sensible, easy-to-understand API for [reading](https://nokogiri.org/tutorials/parsing_an_html_xml_document.html), writing, [modifying](https://nokogiri.org/tutorials/modifying_an_html_xml_document.html), and [querying](https://nokogiri.org/tutorials/searching_a_xml_html_document.html) documents. It is fast and standards-compliant by relying on native parsers like libxml2, libgumbo, and xerces. + +## Guiding Principles + +Some guiding principles Nokogiri tries to follow: + +- be secure-by-default by treating all documents as **untrusted** by default +- be a **thin-as-reasonable layer** on top of the underlying parsers, and don't attempt to fix behavioral differences between the parsers + + +## Features Overview + +- DOM Parser for XML, HTML4, and HTML5 +- SAX Parser for XML and HTML4 +- Push Parser for XML and HTML4 +- Document search via XPath 1.0 +- Document search via CSS3 selectors, with some jquery-like extensions +- XSD Schema validation +- XSLT transformation +- "Builder" DSL for XML and HTML documents + + +## Status + +[![Github Actions CI](https://github.com/sparklemotion/nokogiri/actions/workflows/ci.yml/badge.svg?branch=main)](https://github.com/sparklemotion/nokogiri/actions/workflows/ci.yml) +[![Appveyor CI](https://ci.appveyor.com/api/projects/status/xj2pqwvlxwuwgr06/branch/main?svg=true)](https://ci.appveyor.com/project/flavorjones/nokogiri/branch/main) + +[![Gem Version](https://badge.fury.io/rb/nokogiri.svg)](https://rubygems.org/gems/nokogiri) +[![SemVer compatibility](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=nokogiri&package-manager=bundler&previous-version=1.11.7&new-version=1.12.5)](https://docs.github.com/en/code-security/supply-chain-security/managing-vulnerabilities-in-your-projects-dependencies/about-dependabot-security-updates#about-compatibility-scores) + +[![CII Best Practices](https://bestpractices.coreinfrastructure.org/projects/5344/badge)](https://bestpractices.coreinfrastructure.org/projects/5344) +[![Tidelift dependencies](https://tidelift.com/badges/package/rubygems/nokogiri)](https://tidelift.com/subscription/pkg/rubygems-nokogiri?utm_source=rubygems-nokogiri&utm_medium=referral&utm_campaign=readme) + + +## Support, Getting Help, and Reporting Issues + +All official documentation is posted at https://nokogiri.org (the source for which is at https://github.com/sparklemotion/nokogiri.org/, and we welcome contributions). + +### Reading + +Your first stops for learning more about Nokogiri should be: + +- [API Documentation](https://nokogiri.org/rdoc/index.html) +- [Tutorials](https://nokogiri.org/tutorials/toc.html) +- An excellent community-maintained [Cheat Sheet](https://github.com/sparklemotion/nokogiri/wiki/Cheat-sheet) + + +### Ask For Help + +There are a few ways to ask exploratory questions: + +- The Nokogiri mailing list is active at https://groups.google.com/group/nokogiri-talk +- Open an issue using the "Help Request" template at https://github.com/sparklemotion/nokogiri/issues +- Open a discussion at https://github.com/sparklemotion/nokogiri/discussions + +Please do not mail the maintainers at their personal addresses. + + +### Report A Bug + +The Nokogiri bug tracker is at https://github.com/sparklemotion/nokogiri/issues + +Please use the "Bug Report" or "Installation Difficulties" templates. 
+ + +### Security and Vulnerability Reporting + +Please report vulnerabilities at https://hackerone.com/nokogiri + +Full information and description of our security policy is in [`SECURITY.md`](SECURITY.md) + + +### Semantic Versioning Policy + +Nokogiri follows [Semantic Versioning](https://semver.org/) (since 2017 or so). [![Dependabot's SemVer compatibility score for Nokogiri](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=nokogiri&package-manager=bundler&previous-version=1.11.7&new-version=1.12.5)](https://docs.github.com/en/code-security/supply-chain-security/managing-vulnerabilities-in-your-projects-dependencies/about-dependabot-security-updates#about-compatibility-scores) + +We bump `Major.Minor.Patch` versions following this guidance: + +`Major`: (we've never done this) + +- Significant backwards-incompatible changes to the public API that would require rewriting existing application code. +- Some examples of backwards-incompatible changes we might someday consider for a Major release are at [`ROADMAP.md`](ROADMAP.md). + +`Minor`: + +- Features and bugfixes. +- Updating packaged libraries for non-security-related reasons. +- Dropping support for EOLed Ruby versions. [Some folks find this objectionable](https://github.com/sparklemotion/nokogiri/issues/1568), but [SemVer says this is OK if the public API hasn't changed](https://semver.org/#what-should-i-do-if-i-update-my-own-dependencies-without-changing-the-public-api). +- Backwards-incompatible changes to internal or private methods and constants. These are detailed in the "Changes" section of each changelog entry. +- Removal of deprecated methods or parameters, after a generous transition period; usually when those methods or parameters are rarely-used or dangerous to the user. Essentially, removals that do not justify a major version bump. + + +`Patch`: + +- Bugfixes. +- Security updates. +- Updating packaged libraries for security-related reasons. + + +### Sponsorship + +You can help sponsor the maintainers of this software through one of these organizations: + +- [github.com/sponsors/flavorjones](https://github.com/sponsors/flavorjones) +- [opencollective.com/nokogiri](https://opencollective.com/nokogiri) +- [tidelift.com/subscription/pkg/rubygems-nokogiri](https://tidelift.com/subscription/pkg/rubygems-nokogiri?utm_source=rubygems-nokogiri&utm_medium=referral&utm_campaign=readme) + + +## Installation + +Requirements: + +- Ruby >= 3.1 +- JRuby >= 9.4.0.0 + +If you are compiling the native extension against a system version of libxml2: + +- libxml2 >= 2.9.2 (recommended >= 2.12.0) + + +### Native Gems: Faster, more reliable installation + +"Native gems" contain pre-compiled libraries for a specific machine architecture. On supported platforms, this removes the need for compiling the C extension and the packaged libraries, or for system dependencies to exist. This results in **much faster installation** and **more reliable installation**, which as you probably know are the biggest headaches for Nokogiri users. 
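+As a hedged aside (not part of the upstream README above): after installation you can confirm from Ruby which variant you actually received. This is a minimal illustrative sketch using only RubyGems' own API; the sample output wording is an assumption, not something the gem prints.

```ruby
# Illustrative only: list installed nokogiri gem specs and their platforms.
# A platform string such as "x86_64-linux-gnu" indicates a precompiled native gem;
# a platform of "ruby" indicates the source gem that compiled its C extension at install time.
require "rubygems"

Gem::Specification.select { |spec| spec.name == "nokogiri" }.each do |spec|
  puts "#{spec.full_name} (platform: #{spec.platform})"
end
```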
+ +### Supported Platforms + +Nokogiri ships pre-compiled, "native" gems for the following platforms: + +- Linux: + - `x86_64-linux-gnu`, `aarch64-linux-gnu`, and `arm-linux-gnu` (req: `glibc >= 2.29`) + - `x86_64-linux-musl`, `aarch64-linux-musl`, and `arm-linux-musl` +- Darwin/MacOS: `x86_64-darwin` and `arm64-darwin` +- Windows: `x64-mingw-ucrt` +- Java: any platform running JRuby 9.4 or higher + +To determine whether your system supports one of these gems, look at the output of `bundle platform` or `ruby -e 'puts Gem::Platform.local.to_s'`. + +If you're on a supported platform, either `gem install` or `bundle install` should install a native gem without any additional action on your part. This installation should only take a few seconds, and your output should look something like: + +``` sh +$ gem install nokogiri +Fetching nokogiri-1.11.0-x86_64-linux.gem +Successfully installed nokogiri-1.11.0-x86_64-linux +1 gem installed +``` + + +### Other Installation Options + +Because Nokogiri is a C extension, it requires that you have a C compiler toolchain, Ruby development header files, and some system dependencies installed. + +The following may work for you if you have an appropriately-configured system: + +``` bash +gem install nokogiri +``` + +If you have any issues, please visit [Installing Nokogiri](https://nokogiri.org/tutorials/installing_nokogiri.html) for more complete instructions and troubleshooting. + + +## How To Use Nokogiri + +Nokogiri is a large library, and so it's challenging to briefly summarize it. We've tried to provide long, real-world examples at [Tutorials](https://nokogiri.org/tutorials/toc.html). + +### Parsing and Querying + +Here is example usage for parsing and querying a document: + +```ruby +#! /usr/bin/env ruby + +require 'nokogiri' +require 'open-uri' + +# Fetch and parse HTML document +doc = Nokogiri::HTML(URI.open('https://nokogiri.org/tutorials/installing_nokogiri.html')) + +# Search for nodes by css +doc.css('nav ul.menu li a', 'article h2').each do |link| + puts link.content +end + +# Search for nodes by xpath +doc.xpath('//nav//ul//li/a', '//article//h2').each do |link| + puts link.content +end + +# Or mix and match +doc.search('nav ul.menu li a', '//article//h2').each do |link| + puts link.content +end +``` + + +### Encoding + +Strings are always stored as UTF-8 internally. Methods that return +text values will always return UTF-8 encoded strings. Methods that +return a string containing markup (like `to_xml`, `to_html` and +`inner_html`) will return a string encoded like the source document. + +__WARNING__ + +Some documents declare one encoding, but actually use a different +one. In these cases, which encoding should the parser choose? + +Data is just a stream of bytes. Humans add meaning to that stream. Any +particular set of bytes could be valid characters in multiple +encodings, so detecting encoding with 100% accuracy is not +possible. `libxml2` does its best, but it can't be right all the time. + +If you want Nokogiri to handle the document encoding properly, your +best bet is to explicitly set the encoding. 
Here is an example of +explicitly setting the encoding to EUC-JP on the parser: + +```ruby + doc = Nokogiri.XML('', nil, 'EUC-JP') +``` + + +## Technical Overview + +### Guiding Principles + +As noted above, two guiding principles of the software are: + +- be secure-by-default by treating all documents as **untrusted** by default +- be a **thin-as-reasonable layer** on top of the underlying parsers, and don't attempt to fix behavioral differences between the parsers + +Notably, despite all parsers being standards-compliant, there are behavioral inconsistencies between the parsers used in the CRuby and JRuby implementations, and Nokogiri does not and should not attempt to remove these inconsistencies. Instead, we surface these differences in the test suite when they are important/semantic; or we intentionally write tests to depend only on the important/semantic bits (omitting whitespace from regex matchers on results, for example). + + +### CRuby + +The Ruby (a.k.a., CRuby, MRI, YARV) implementation is a C extension that depends on libxml2 and libxslt (which in turn depend on zlib and possibly libiconv). + +These dependencies are met by default by Nokogiri's packaged versions of the libxml2 and libxslt source code, but a configuration option `--use-system-libraries` is provided to allow specification of alternative library locations. See [Installing Nokogiri](https://nokogiri.org/tutorials/installing_nokogiri.html) for full documentation. + +We provide native gems by pre-compiling libxml2 and libxslt (and potentially zlib and libiconv) and packaging them into the gem file. In this case, no compilation is necessary at installation time, which leads to faster and more reliable installation. + +See [`LICENSE-DEPENDENCIES.md`](LICENSE-DEPENDENCIES.md) for more information on which dependencies are provided in which native and source gems. + + +### JRuby + +The Java (a.k.a. JRuby) implementation is a Java extension that depends primarily on Xerces and NekoHTML for parsing, though additional dependencies are on `isorelax`, `nekodtd`, `jing`, `serializer`, `xalan-j`, and `xml-apis`. + +These dependencies are provided by pre-compiled jar files packaged in the `java` platform gem. + +See [`LICENSE-DEPENDENCIES.md`](LICENSE-DEPENDENCIES.md) for more information on which dependencies are provided in which native and source gems. + + +## Contributing + +See [`CONTRIBUTING.md`](CONTRIBUTING.md) for an intro guide to developing Nokogiri. + + +## Code of Conduct + +We've adopted the Contributor Covenant code of conduct, which you can read in full in [`CODE_OF_CONDUCT.md`](CODE_OF_CONDUCT.md). + + +## License + +This project is licensed under the terms of the MIT license. + +See this license at [`LICENSE.md`](LICENSE.md). + + +### Dependencies + +Some additional libraries may be distributed with your version of Nokogiri. Please see [`LICENSE-DEPENDENCIES.md`](LICENSE-DEPENDENCIES.md) for a discussion of the variations as well as the licenses thereof. 
+ + +## Authors + +- Mike Dalessio +- Aaron Patterson +- Yoko Harada +- Akinori MUSHA +- John Shahid +- Karol Bucek +- Sam Ruby +- Craig Barnes +- Stephen Checkoway +- Lars Kanis +- Sergio Arbeo +- Timothy Elliott +- Nobuyoshi Nakada diff --git a/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/dependencies.yml b/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/dependencies.yml new file mode 100644 index 000000000..276d991b9 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/dependencies.yml @@ -0,0 +1,42 @@ +--- +libxml2: + version: "2.13.8" + sha256: "277294cb33119ab71b2bc81f2f445e9bc9435b893ad15bb2cd2b0e859a0ee84a" + # sha-256 hash provided in https://download.gnome.org/sources/libxml2/2.13/libxml2-2.13.8.sha256sum + +libxslt: + version: "1.1.43" + sha256: "5a3d6b383ca5afc235b171118e90f5ff6aa27e9fea3303065231a6d403f0183a" + # sha-256 hash provided in https://download.gnome.org/sources/libxslt/1.1/libxslt-1.1.43.sha256sum + +zlib: + version: "1.3.1" + sha256: "9a93b2b7dfdac77ceba5a558a580e74667dd6fede4585b91eefb60f03b72df23" + # SHA-256 hash provided on http://zlib.net/ + +libiconv: + version: "1.17" + sha256: "8f74213b56238c85a50a5329f77e06198771e70dd9a739779f4c02f65d971313" + # signature verified by following this path: + # - release announced at https://savannah.gnu.org/forum/forum.php?forum_id=10175 + # - which links to https://savannah.gnu.org/users/haible as the releaser + # - which links to https://savannah.gnu.org/people/viewgpg.php?user_id=1871 as the gpg key + # + # So: + # - wget -q -O - https://savannah.gnu.org/people/viewgpg.php?user_id=1871 | gpg --import + # gpg: key F5BE8B267C6A406D: 1 signature not checked due to a missing key + # gpg: key F5BE8B267C6A406D: public key "Bruno Haible (Open Source Development) " imported + # gpg: Total number processed: 1 + # gpg: imported: 1 + # gpg: marginals needed: 3 completes needed: 1 trust model: pgp + # gpg: depth: 0 valid: 4 signed: 0 trust: 0-, 0q, 0n, 0m, 0f, 4u + # gpg: next trustdb check due at 2024-05-09 + # - gpg --verify libiconv-1.17.tar.gz.sig ports/archives/libiconv-1.17.tar.gz + # gpg: Signature made Sun 15 May 2022 11:26:42 AM EDT + # gpg: using RSA key 9001B85AF9E1B83DF1BDA942F5BE8B267C6A406D + # gpg: Good signature from "Bruno Haible (Open Source Development) " [unknown] + # gpg: WARNING: This key is not certified with a trusted signature! + # gpg: There is no indication that the signature belongs to the owner. + # Primary key fingerprint: 9001 B85A F9E1 B83D F1BD A942 F5BE 8B26 7C6A 406D + # + # And this sha256sum is calculated from that verified tarball. 
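As a hedged aside (not part of the vendored files above): the pinned `sha256` values in `dependencies.yml` can be re-checked locally with Ruby's standard library. The sketch below assumes the tarballs have already been downloaded; the `ports/archives/` location and the `.tar.gz` naming are illustrative assumptions, not paths shipped with the gem.

```ruby
# Sketch: compare downloaded tarballs against the sha256 values pinned in dependencies.yml.
# Assumptions (for illustration only): tarballs live under ports/archives/ and are
# named "<library>-<version>.tar.gz".
require "digest"
require "yaml"

deps = YAML.load_file("dependencies.yml")
deps.each do |name, info|
  path = File.join("ports", "archives", "#{name}-#{info["version"]}.tar.gz")
  next unless File.exist?(path)

  actual = Digest::SHA256.file(path).hexdigest
  puts "#{name} #{info["version"]}: #{actual == info["sha256"] ? "OK" : "MISMATCH"}"
end
```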
diff --git a/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/depend b/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/depend new file mode 100644 index 000000000..24f590886 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/depend @@ -0,0 +1,38 @@ +# -*-makefile-*- +# DO NOT DELETE + +gumbo.o: $(srcdir)/nokogiri.h +html_document.o: $(srcdir)/nokogiri.h +html_element_description.o: $(srcdir)/nokogiri.h +html_entity_lookup.o: $(srcdir)/nokogiri.h +html_sax_parser_context.o: $(srcdir)/nokogiri.h +html_sax_push_parser.o: $(srcdir)/nokogiri.h +libxml2_backwards_compat.o: $(srcdir)/nokogiri.h +nokogiri.o: $(srcdir)/nokogiri.h +test_global_handlers.o: $(srcdir)/nokogiri.h +xml_attr.o: $(srcdir)/nokogiri.h +xml_attribute_decl.o: $(srcdir)/nokogiri.h +xml_cdata.o: $(srcdir)/nokogiri.h +xml_comment.o: $(srcdir)/nokogiri.h +xml_document.o: $(srcdir)/nokogiri.h +xml_document_fragment.o: $(srcdir)/nokogiri.h +xml_dtd.o: $(srcdir)/nokogiri.h +xml_element_content.o: $(srcdir)/nokogiri.h +xml_element_decl.o: $(srcdir)/nokogiri.h +xml_encoding_handler.o: $(srcdir)/nokogiri.h +xml_entity_decl.o: $(srcdir)/nokogiri.h +xml_entity_reference.o: $(srcdir)/nokogiri.h +xml_namespace.o: $(srcdir)/nokogiri.h +xml_node.o: $(srcdir)/nokogiri.h +xml_node_set.o: $(srcdir)/nokogiri.h +xml_processing_instruction.o: $(srcdir)/nokogiri.h +xml_reader.o: $(srcdir)/nokogiri.h +xml_relax_ng.o: $(srcdir)/nokogiri.h +xml_sax_parser.o: $(srcdir)/nokogiri.h +xml_sax_parser_context.o: $(srcdir)/nokogiri.h +xml_sax_push_parser.o: $(srcdir)/nokogiri.h +xml_schema.o: $(srcdir)/nokogiri.h +xml_syntax_error.o: $(srcdir)/nokogiri.h +xml_text.o: $(srcdir)/nokogiri.h +xml_xpath_context.o: $(srcdir)/nokogiri.h +xslt_stylesheet.o: $(srcdir)/nokogiri.h diff --git a/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/extconf.rb b/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/extconf.rb new file mode 100644 index 000000000..844129ed6 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/extconf.rb @@ -0,0 +1,1165 @@ +# frozen_string_literal: true + +# rubocop:disable Style/GlobalVars + +ENV["RC_ARCHS"] = "" if RUBY_PLATFORM.include?("darwin") + +require "mkmf" +require "rbconfig" +require "fileutils" +require "shellwords" +require "pathname" + +# helpful constants +PACKAGE_ROOT_DIR = File.expand_path(File.join(File.dirname(__FILE__), "..", "..")) +REQUIRED_LIBXML_VERSION = "2.9.2" +RECOMMENDED_LIBXML_VERSION = "2.12.0" + +REQUIRED_MINI_PORTILE_VERSION = "~> 2.8.2" # keep this version in sync with the one in the gemspec +REQUIRED_PKG_CONFIG_VERSION = "~> 1.1" + +# Keep track of what versions of what libraries we build against +OTHER_LIBRARY_VERSIONS = {} + +NOKOGIRI_HELP_MESSAGE = <<~HELP + USAGE: ruby #{$PROGRAM_NAME} [options] + + Flags that are always valid: + + --use-system-libraries + --enable-system-libraries + Use system libraries instead of building and using the packaged libraries. + + --disable-system-libraries + Use the packaged libraries, and ignore the system libraries. This is the default on most + platforms, and overrides `--use-system-libraries` and the environment variable + `NOKOGIRI_USE_SYSTEM_LIBRARIES`. + + --disable-clean + Do not clean out intermediate files after successful build. 
+ + --prevent-strip + Take steps to prevent stripping the symbol table and debugging info from the shared + library, potentially overriding RbConfig's CFLAGS/LDFLAGS/DLDFLAGS. + + + Flags only used when using system libraries: + + General: + + --with-opt-dir=DIRECTORY + Look for headers and libraries in DIRECTORY. + + --with-opt-lib=DIRECTORY + Look for libraries in DIRECTORY. + + --with-opt-include=DIRECTORY + Look for headers in DIRECTORY. + + + Related to libxml2: + + --with-xml2-dir=DIRECTORY + Look for xml2 headers and library in DIRECTORY. + + --with-xml2-lib=DIRECTORY + Look for xml2 library in DIRECTORY. + + --with-xml2-include=DIRECTORY + Look for xml2 headers in DIRECTORY. + + --with-xml2-source-dir=DIRECTORY + (dev only) Build libxml2 from the source code in DIRECTORY + + --disable-xml2-legacy + Do not build libxml2 with zlib, liblzma, or HTTP support. This will become the default + in a future version of Nokogiri. + + + Related to libxslt: + + --with-xslt-dir=DIRECTORY + Look for xslt headers and library in DIRECTORY. + + --with-xslt-lib=DIRECTORY + Look for xslt library in DIRECTORY. + + --with-xslt-include=DIRECTORY + Look for xslt headers in DIRECTORY. + + --with-xslt-source-dir=DIRECTORY + (dev only) Build libxslt from the source code in DIRECTORY + + + Related to libexslt: + + --with-exslt-dir=DIRECTORY + Look for exslt headers and library in DIRECTORY. + + --with-exslt-lib=DIRECTORY + Look for exslt library in DIRECTORY. + + --with-exslt-include=DIRECTORY + Look for exslt headers in DIRECTORY. + + + Related to iconv: + + --with-iconv-dir=DIRECTORY + Look for iconv headers and library in DIRECTORY. + + --with-iconv-lib=DIRECTORY + Look for iconv library in DIRECTORY. + + --with-iconv-include=DIRECTORY + Look for iconv headers in DIRECTORY. + + + Related to zlib (ignored if `--disable-xml2-legacy` is used): + + --with-zlib-dir=DIRECTORY + Look for zlib headers and library in DIRECTORY. + + --with-zlib-lib=DIRECTORY + Look for zlib library in DIRECTORY. + + --with-zlib-include=DIRECTORY + Look for zlib headers in DIRECTORY. + + + Flags only used when building and using the packaged libraries: + + --disable-static + Do not statically link packaged libraries, instead use shared libraries. + + --enable-cross-build + Enable cross-build mode. (You probably do not want to set this manually.) + + + Environment variables used: + + NOKOGIRI_USE_SYSTEM_LIBRARIES + Equivalent to `--enable-system-libraries` when set, even if nil or blank. + + AR + Use this path to invoke the library archiver instead of `RbConfig::CONFIG['AR']` + + CC + Use this path to invoke the compiler instead of `RbConfig::CONFIG['CC']` + + CPPFLAGS + If this string is accepted by the C preprocessor, add it to the flags passed to the C preprocessor + + CFLAGS + If this string is accepted by the compiler, add it to the flags passed to the compiler + + LD + Use this path to invoke the linker instead of `RbConfig::CONFIG['LD']` + + LDFLAGS + If this string is accepted by the linker, add it to the flags passed to the linker + + LIBS + Add this string to the flags passed to the linker +HELP + +# +# utility functions +# +def config_clean? + enable_config("clean", true) +end + +def config_static? + default_static = !truffle? + enable_config("static", default_static) +end + +def config_cross_build? + enable_config("cross-build") +end + +def config_system_libraries? 
+ enable_config("system-libraries", ENV.key?("NOKOGIRI_USE_SYSTEM_LIBRARIES")) do |_, default| + arg_config("--use-system-libraries", default) + end +end + +def config_with_xml2_legacy? + enable_config("xml2-legacy", true) +end + +def windows? + RbConfig::CONFIG["target_os"].match?(/mingw|mswin/) +end + +def solaris? + RbConfig::CONFIG["target_os"].include?("solaris") +end + +def darwin? + RbConfig::CONFIG["target_os"].include?("darwin") +end + +def openbsd? + RbConfig::CONFIG["target_os"].include?("openbsd") +end + +def aix? + RbConfig::CONFIG["target_os"].include?("aix") +end + +def unix? + !(windows? || solaris? || darwin?) +end + +def nix? + ENV.key?("NIX_CC") +end + +def truffle? + RUBY_ENGINE == "truffleruby" +end + +def concat_flags(*args) + args.compact.join(" ") +end + +def local_have_library(lib, func = nil, headers = nil) + have_library(lib, func, headers) || have_library("lib#{lib}", func, headers) +end + +def zlib_source(version_string) + # As of 2022-12, I'm starting to see failed downloads often enough from zlib.net that I want to + # change the default to github. + if ENV["NOKOGIRI_USE_CANONICAL_ZLIB_SOURCE"] + "https://zlib.net/fossils/zlib-#{version_string}.tar.gz" + else + "https://github.com/madler/zlib/releases/download/v#{version_string}/zlib-#{version_string}.tar.gz" + end +end + +def gnome_source + "https://download.gnome.org" +end + +LOCAL_PACKAGE_RESPONSE = Object.new +def LOCAL_PACKAGE_RESPONSE.%(package) + package ? "yes: #{package}" : "no" +end + +# wrapper around MakeMakefil#pkg_config and the PKGConfig gem +def try_package_configuration(pc) + unless ENV.key?("NOKOGIRI_TEST_PKG_CONFIG_GEM") + # try MakeMakefile#pkg_config, which uses the system utility `pkg-config`. + return if checking_for("#{pc} using `pkg_config`", LOCAL_PACKAGE_RESPONSE) do + pkg_config(pc) + end + end + + # `pkg-config` probably isn't installed, which appears to be the case for lots of freebsd systems. 
+ # let's fall back to the pkg-config gem, which knows how to parse .pc files, and wrap it with the + # same logic as MakeMakefile#pkg_config + begin + require "rubygems" + gem("pkg-config", REQUIRED_PKG_CONFIG_VERSION) + require "pkg-config" + + checking_for("#{pc} using pkg-config gem version #{PKGConfig::VERSION}", LOCAL_PACKAGE_RESPONSE) do + if PKGConfig.have_package(pc) + cflags = PKGConfig.cflags(pc) + ldflags = PKGConfig.libs_only_L(pc) + libs = PKGConfig.libs_only_l(pc) + + Logging.message("pkg-config gem found package configuration for %s\n", pc) + Logging.message("cflags: %s\nldflags: %s\nlibs: %s\n\n", cflags, ldflags, libs) + + [cflags, ldflags, libs] + end + end + rescue LoadError + message("Please install either the `pkg-config` utility or the `pkg-config` rubygem.\n") + end +end + +# set up mkmf to link against the library if we can find it +def have_package_configuration(opt: nil, pc: nil, lib:, func:, headers:) + if opt + dir_config(opt) + dir_config("opt") + end + + # see if we have enough path info to do this without trying any harder + unless ENV.key?("NOKOGIRI_TEST_PKG_CONFIG") + return true if local_have_library(lib, func, headers) + end + + try_package_configuration(pc) if pc + + # verify that we can compile and link against the library + local_have_library(lib, func, headers) +end + +def ensure_package_configuration(opt: nil, pc: nil, lib:, func:, headers:) + have_package_configuration(opt: opt, pc: pc, lib: lib, func: func, headers: headers) || + abort_could_not_find_library(lib) +end + +def ensure_func(func, headers = nil) + have_func(func, headers) || abort_could_not_find_library(func) +end + +def preserving_globals + values = [$arg_config, $INCFLAGS, $CFLAGS, $CPPFLAGS, $LDFLAGS, $DLDFLAGS, $LIBPATH, $libs].map(&:dup) + yield +ensure + $arg_config, $INCFLAGS, $CFLAGS, $CPPFLAGS, $LDFLAGS, $DLDFLAGS, $LIBPATH, $libs = values +end + +def abort_could_not_find_library(lib) + callers = caller(1..2).join("\n") + abort("-----\n#{callers}\n#{lib} is missing. Please locate mkmf.log to investigate how it is failing.\n-----") +end + +def chdir_for_build(&block) + # When using rake-compiler-dock on Windows, the underlying Virtualbox shared + # folders don't support symlinks, but libiconv expects it for a build on + # Linux. We work around this limitation by using the temp dir for cooking. + build_dir = /mingw|mswin|cygwin/.match?(ENV["RCD_HOST_RUBY_PLATFORM"].to_s) ? "/tmp" : "." + Dir.chdir(build_dir, &block) +end + +def sh_export_path(path) + # because libxslt 1.1.29 configure.in uses AC_PATH_TOOL which treats ":" + # as a $PATH separator, we need to convert windows paths from + # + # C:/path/to/foo + # + # to + # + # /C/path/to/foo + # + # which is sh-compatible, in order to find things properly during + # configuration + return path unless windows? + + match = Regexp.new("^([A-Z]):(/.*)").match(path) + if match && match.length == 3 + return File.join("/", match[1], match[2]) + end + + path +end + +def libflag_to_filename(ldflag) + case ldflag + when /\A-l(.+)/ + "lib#{Regexp.last_match(1)}.#{$LIBEXT}" + end +end + +def have_libxml_headers?(version = nil) + source = if version.nil? + <<~SRC + #include + SRC + else + version_int = format("%d%2.2d%2.2d", *version.split(".")) + <<~SRC + #include + #if LIBXML_VERSION < #{version_int} + # error libxml2 is older than #{version} + #endif + SRC + end + + try_cpp(source) +end + +def try_link_iconv(using = nil) + checking_for(using ? "iconv using #{using}" : "iconv") do + ["", "-liconv"].any? 
do |opt| + preserving_globals do + yield if block_given? + + try_link(<<~SRC, opt) + #include + #include + int main(void) + { + iconv_t cd = iconv_open("", ""); + iconv(cd, NULL, NULL, NULL, NULL); + return EXIT_SUCCESS; + } + SRC + end + end + end +end + +def iconv_configure_flags + # give --with-iconv-dir and --with-opt-dir first priority + ["iconv", "opt"].each do |target| + config = preserving_globals { dir_config(target) } + next unless config.any? && try_link_iconv("--with-#{target}-* flags") { dir_config(target) } + + idirs, ldirs = config.map do |dirs| + Array(dirs).flat_map do |dir| + dir.split(File::PATH_SEPARATOR) + end if dirs + end + + return [ + "--with-iconv=yes", + *("CPPFLAGS=#{idirs.map { |dir| "-I" + dir }.join(" ")}" if idirs), + *("LDFLAGS=#{ldirs.map { |dir| "-L" + dir }.join(" ")}" if ldirs), + ] + end + + if try_link_iconv + return ["--with-iconv=yes"] + end + + config = preserving_globals { pkg_config("libiconv") } + if config && try_link_iconv("pkg-config libiconv") { pkg_config("libiconv") } + cflags, ldflags, libs = config + + return [ + "--with-iconv=yes", + "CPPFLAGS=#{cflags}", + "LDFLAGS=#{ldflags}", + "LIBS=#{libs}", + ] + end + + abort_could_not_find_library("libiconv") +end + +def process_recipe(name, version, static_p, cross_p, cacheable_p = true) + require "rubygems" + gem("mini_portile2", REQUIRED_MINI_PORTILE_VERSION) # gemspec is not respected at install time + require "mini_portile2" + message("Using mini_portile version #{MiniPortile::VERSION}\n") + + unless ["libxml2", "libxslt"].include?(name) + OTHER_LIBRARY_VERSIONS[name] = version + end + + MiniPortile.new(name, version).tap do |recipe| + def recipe.port_path + "#{@target}/#{RUBY_PLATFORM}/#{@name}/#{@version}" + end + + # We use 'host' to set compiler prefix for cross-compiling. Prefer host_alias over host. And + # prefer i686 (what external dev tools use) to i386 (what ruby's configure.ac emits). + recipe.host = RbConfig::CONFIG["host_alias"].empty? ? RbConfig::CONFIG["host"] : RbConfig::CONFIG["host_alias"] + recipe.host = recipe.host.gsub("i386", "i686") + + recipe.target = File.join(PACKAGE_ROOT_DIR, "ports") if cacheable_p + recipe.configure_options << "--libdir=#{File.join(recipe.path, "lib")}" + + yield recipe + + env = Hash.new do |hash, key| + hash[key] = (ENV[key]).to_s + end + + recipe.configure_options.flatten! 
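+
+    # For example (illustrative values, not taken from a real build), given
+    #
+    #   recipe.configure_options = ["--disable-dependency-tracking", "CFLAGS=-O2 -g"]
+    #
+    # the `delete_if` pass below leaves
+    #
+    #   recipe.configure_options == ["--disable-dependency-tracking"]
+    #   env["CFLAGS"]            == "-O2 -g"   # concatenated onto any value already in env
+    #
+    # and the env pairs are re-appended as "KEY=value" strings at the end of this
+    # block, after the static/cross-build/arch flags have been folded in.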
+ + recipe.configure_options.delete_if do |option| + case option + when /\A(\w+)=(.*)\z/ + env[Regexp.last_match(1)] = if env.key?(Regexp.last_match(1)) + concat_flags(env[Regexp.last_match(1)], Regexp.last_match(2)) + else + Regexp.last_match(2) + end + true + else + false + end + end + + if static_p + recipe.configure_options += [ + "--disable-shared", + "--enable-static", + ] + env["CFLAGS"] = concat_flags(env["CFLAGS"], "-fPIC") + else + recipe.configure_options += [ + "--enable-shared", + "--disable-static", + ] + end + + if cross_p + recipe.configure_options += [ + "--target=#{recipe.host}", + "--host=#{recipe.host}", + ] + end + + if RbConfig::CONFIG["target_cpu"] == "universal" + ["CFLAGS", "LDFLAGS"].each do |key| + unless env[key].include?("-arch") + env[key] = concat_flags(env[key], RbConfig::CONFIG["ARCH_FLAG"]) + end + end + end + + recipe.configure_options += env.map do |key, value| + "#{key}=#{value.strip}" + end + + checkpoint = "#{recipe.target}/#{recipe.name}-#{recipe.version}-#{RUBY_PLATFORM}.installed" + if File.exist?(checkpoint) && !recipe.source_directory + message("Building Nokogiri with a packaged version of #{name}-#{version}.\n") + else + message(<<~EOM) + ---------- IMPORTANT NOTICE ---------- + Building Nokogiri with a packaged version of #{name}-#{version}. + Configuration options: #{recipe.configure_options.shelljoin} + EOM + + unless recipe.patch_files.empty? + message("The following patches are being applied:\n") + + recipe.patch_files.each do |patch| + message(format(" - %s\n", File.basename(patch))) + end + end + + message(<<~EOM) if name != "libgumbo" + + The Nokogiri maintainers intend to provide timely security updates, but if + this is a concern for you and want to use your OS/distro system library + instead, then abort this installation process and install nokogiri as + instructed at: + + https://nokogiri.org/tutorials/installing_nokogiri.html#installing-using-standard-system-libraries + + EOM + + message(<<~EOM) if name == "libxml2" + Note, however, that nokogiri cannot guarantee compatibility with every + version of libxml2 that may be provided by OS/package vendors. + + EOM + + chdir_for_build { recipe.cook } + FileUtils.touch(checkpoint) + end + recipe.activate + end +end + +def copy_packaged_libraries_headers(to_path:, from_recipes:) + FileUtils.rm_rf(to_path, secure: true) + FileUtils.mkdir(to_path) + from_recipes.each do |recipe| + FileUtils.cp_r(Dir[File.join(recipe.path, "include/*")], to_path) + end +end + +def do_help + print(NOKOGIRI_HELP_MESSAGE) + exit!(0) +end + +def do_clean + root = Pathname(PACKAGE_ROOT_DIR) + pwd = Pathname(Dir.pwd) + + # Skip if this is a development work tree + unless (root + ".git").exist? + message("Cleaning files only used during build.\n") + + # (root + 'tmp') cannot be removed at this stage because + # nokogiri.so is yet to be copied to lib. + + # clean the ports build directory + Pathname.glob(pwd.join("tmp", "*", "ports")) do |dir| + FileUtils.rm_rf(dir, verbose: true) + end + + if config_static? + # ports installation can be safely removed if statically linked. + FileUtils.rm_rf(root + "ports", verbose: true) + else + FileUtils.rm_rf(root + "ports" + "archives", verbose: true) + end + end + + exit!(0) +end + +# In ruby 3.2, symbol resolution changed on Darwin, to introduce the `-bundle_loader` flag to +# resolve symbols against the ruby binary. +# +# This makes it challenging to build a single extension that works with both a ruby with +# `--enable-shared` and one with `--disable-shared. 
To work around that, we choose to add +# `-flat_namespace` to the link line (later in this file). +# +# The `-flat_namespace` line introduces its own behavior change, which is that (similar to on +# Linux), any symbols in the extension that are exported may now be resolved by shared libraries +# loaded by the Ruby process. Specifically, that means that libxml2 and libxslt, which are +# statically linked into the nokogiri bundle, will resolve (at runtime) to a system libxml2 loaded +# by Ruby on Darwin. And it appears that often Ruby on Darwin does indeed load the system libxml2, +# and that messes with our assumptions about whether we're running with a patched libxml2 or a +# vanilla libxml2. +# +# We choose to use `-load_hidden` in this case to prevent exporting those symbols from libxml2 and +# libxslt, which ensures that they will be resolved to the static libraries in the bundle. In other +# words, when we use `load_hidden`, what happens in the extension stays in the extension. +# +# See https://github.com/rake-compiler/rake-compiler-dock/issues/87 for more info. +# +# Anyway, this method is the logical bit to tell us when to turn on these workarounds. +def needs_darwin_linker_hack + config_cross_build? && + darwin? && + Gem::Requirement.new("~> 3.2").satisfied_by?(Gem::Version.new(RbConfig::CONFIG["ruby_version"].split("+").first)) +end + +# +# main +# +do_help if arg_config("--help") +do_clean if arg_config("--clean") + +if openbsd? && !config_system_libraries? + unless %x(#{ENV["CC"] || "/usr/bin/cc"} -v 2>&1).include?("clang") + (ENV["CC"] ||= find_executable("egcc")) || + abort("Please install gcc 4.9+ from ports using `pkg_add -v gcc`") + end + append_cppflags "-I/usr/local/include" +end + +if ENV["AR"] + RbConfig::CONFIG["AR"] = RbConfig::MAKEFILE_CONFIG["AR"] = ENV["AR"] +end + +if ENV["CC"] + RbConfig::CONFIG["CC"] = RbConfig::MAKEFILE_CONFIG["CC"] = ENV["CC"] +end + +if ENV["LD"] + RbConfig::CONFIG["LD"] = RbConfig::MAKEFILE_CONFIG["LD"] = ENV["LD"] +end + +# use same toolchain for libxml and libxslt +ENV["AR"] = RbConfig::CONFIG["AR"] +ENV["CC"] = RbConfig::CONFIG["CC"] +ENV["LD"] = RbConfig::CONFIG["LD"] + +if arg_config("--prevent-strip") + old_cflags = $CFLAGS.split.join(" ") + old_ldflags = $LDFLAGS.split.join(" ") + old_dldflags = $DLDFLAGS.split.join(" ") + $CFLAGS = $CFLAGS.split.reject { |flag| flag == "-s" }.join(" ") + $LDFLAGS = $LDFLAGS.split.reject { |flag| flag == "-s" }.join(" ") + $DLDFLAGS = $DLDFLAGS.split.reject { |flag| flag == "-s" }.join(" ") + puts "Prevent stripping by removing '-s' from $CFLAGS" if old_cflags != $CFLAGS + puts "Prevent stripping by removing '-s' from $LDFLAGS" if old_ldflags != $LDFLAGS + puts "Prevent stripping by removing '-s' from $DLDFLAGS" if old_dldflags != $DLDFLAGS +end + +# adopt environment config +append_cflags(ENV["CFLAGS"]) unless ENV["CFLAGS"].nil? +append_cppflags(ENV["CPPFLAGS"]) unless ENV["CPPFLAGS"].nil? +append_ldflags(ENV["LDFLAGS"]) unless ENV["LDFLAGS"].nil? +$LIBS = concat_flags($LIBS, ENV["LIBS"]) + +# libgumbo uses C90/C99 features, see #2302 +append_cflags(["-std=c99", "-Wno-declaration-after-statement"]) + +# gumbo html5 serialization is slower with O3, let's make sure we use O2 +append_cflags("-O2") + +# always include debugging information +append_cflags("-g") + +# we use at least one inline function in the C extension +append_cflags("-Winline") + +# good to have no matter what Ruby was compiled with +append_cflags("-Wmissing-noreturn") + +# check integer loss of precision. 
this flag won't generally work until Ruby 3.4. +# see https://bugs.ruby-lang.org/issues/20507 +append_cflags("-Wconversion") + +# handle clang variations, see #1101 +if darwin? + append_cflags("-Wno-error=unused-command-line-argument-hard-error-in-future") + append_cflags("-Wno-unknown-warning-option") +end + +# these tend to be noisy, but on occasion useful during development +# append_cflags(["-Wcast-qual", "-Wwrite-strings"]) + +# Add SDK-specific include path for macOS and brew versions before v2.2.12 (2020-04-08) [#1851, #1801] +macos_mojave_sdk_include_path = "/Library/Developer/CommandLineTools/SDKs/MacOSX.sdk/usr/include/libxml2" +if config_system_libraries? && darwin? && Dir.exist?(macos_mojave_sdk_include_path) && !nix? + append_cppflags("-I#{macos_mojave_sdk_include_path}") +end + +# Work around a character escaping bug in MSYS by passing an arbitrary double-quoted parameter to gcc. +# See https://sourceforge.net/p/mingw/bugs/2142 +append_cppflags(' "-Idummypath"') if windows? + +if config_system_libraries? + message "Building nokogiri using system libraries.\n" + if config_with_xml2_legacy? + ensure_package_configuration( + opt: "zlib", + pc: "zlib", + lib: "z", + headers: "zlib.h", + func: "gzdopen", + ) + end + ensure_package_configuration( + opt: "xml2", + pc: "libxml-2.0", + lib: "xml2", + headers: "libxml/parser.h", + func: "xmlParseDoc", + ) + ensure_package_configuration( + opt: "xslt", + pc: "libxslt", + lib: "xslt", + headers: "libxslt/xslt.h", + func: "xsltParseStylesheetDoc", + ) + ensure_package_configuration( + opt: "exslt", + pc: "libexslt", + lib: "exslt", + headers: "libexslt/exslt.h", + func: "exsltFuncRegister", + ) + + have_libxml_headers?(REQUIRED_LIBXML_VERSION) || + abort("ERROR: libxml2 version #{REQUIRED_LIBXML_VERSION} or later is required!") + have_libxml_headers?(RECOMMENDED_LIBXML_VERSION) || + warn("WARNING: libxml2 version #{RECOMMENDED_LIBXML_VERSION} or later is highly recommended, but proceeding anyway.") + +else + message "Building nokogiri using packaged libraries.\n" + + static_p = config_static? + message "Static linking is #{static_p ? "enabled" : "disabled"}.\n" + + cross_build_p = config_cross_build? + message "Cross build is #{cross_build_p ? "enabled" : "disabled"}.\n" + + if needs_darwin_linker_hack + append_ldflags("-Wl,-flat_namespace") + end + + require "yaml" + dependencies = YAML.load_file(File.join(PACKAGE_ROOT_DIR, "dependencies.yml")) + + dir_config("zlib") if config_with_xml2_legacy? + + if cross_build_p || windows? + if config_with_xml2_legacy? + zlib_recipe = process_recipe("zlib", dependencies["zlib"]["version"], static_p, cross_build_p) do |recipe| + recipe.files = [{ + url: zlib_source(recipe.version), + sha256: dependencies["zlib"]["sha256"], + }] + if windows? + class << recipe + attr_accessor :cross_build_p + + def configure + Dir.chdir(work_path) do + mk = File.read("win32/Makefile.gcc") + File.open("win32/Makefile.gcc", "wb") do |f| + f.puts "BINARY_PATH = #{path}/bin" + f.puts "LIBRARY_PATH = #{path}/lib" + f.puts "INCLUDE_PATH = #{path}/include" + mk.sub!(/^PREFIX\s*=\s*$/, "PREFIX = #{host}-") if cross_build_p + f.puts mk + end + end + end + + def configured? 
+ Dir.chdir(work_path) do + !!(File.read("win32/Makefile.gcc") =~ /^BINARY_PATH/) + end + end + + def compile + execute("compile", "make -f win32/Makefile.gcc") + end + + def install + execute("install", "make -f win32/Makefile.gcc install") + end + end + recipe.cross_build_p = cross_build_p + else + class << recipe + def configure + env = {} + env["CFLAGS"] = concat_flags(ENV["CFLAGS"], "-fPIC", "-g") + env["CHOST"] = host + execute("configure", ["./configure", "--static", configure_prefix], { env: env }) + if darwin? + # needed as of zlib 1.2.13 + Dir.chdir(work_path) do + makefile = File.read("Makefile").gsub(/^AR=.*$/, "AR=#{host}-libtool") + File.open("Makefile", "w") { |m| m.write(makefile) } + end + end + end + end + end + end + end + + unless unix? + libiconv_recipe = process_recipe( + "libiconv", + dependencies["libiconv"]["version"], + static_p, + cross_build_p, + ) do |recipe| + recipe.files = [{ + url: "https://ftp.gnu.org/pub/gnu/libiconv/#{recipe.name}-#{recipe.version}.tar.gz", + sha256: dependencies["libiconv"]["sha256"], + }] + + # The libiconv configure script doesn't accept "arm64" host string but "aarch64" + recipe.host = recipe.host.gsub("arm64-apple-darwin", "aarch64-apple-darwin") + + cflags = concat_flags(ENV["CFLAGS"], "-O2", "-g") + + recipe.configure_options += [ + "--disable-dependency-tracking", + "CPPFLAGS=-Wall", + "CFLAGS=#{cflags}", + "CXXFLAGS=#{cflags}", + "LDFLAGS=", + ] + end + end + elsif darwin? && !have_header("iconv.h") + abort(<<~EOM.chomp) + ----- + The file "iconv.h" is missing in your build environment, + which means you haven't installed Xcode Command Line Tools properly. + + To install Command Line Tools, try running `xcode-select --install` on + terminal and follow the instructions. If it fails, open Xcode.app, + select from the menu "Xcode" - "Open Developer Tool" - "More Developer + Tools" to open the developer site, download the installer for your OS + version and run it. + ----- + EOM + end + + if zlib_recipe + append_cppflags("-I#{zlib_recipe.path}/include") + $LIBPATH = ["#{zlib_recipe.path}/lib"] | $LIBPATH + ensure_package_configuration( + opt: "zlib", + pc: "zlib", + lib: "z", + headers: "zlib.h", + func: "gzdopen", + ) + end + + if libiconv_recipe + append_cppflags("-I#{libiconv_recipe.path}/include") + $LIBPATH = ["#{libiconv_recipe.path}/lib"] | $LIBPATH + ensure_package_configuration( + opt: "iconv", + pc: "iconv", + lib: "iconv", + headers: "iconv.h", + func: "iconv_open", + ) + end + + libxml2_recipe = process_recipe("libxml2", dependencies["libxml2"]["version"], static_p, cross_build_p) do |recipe| + source_dir = arg_config("--with-xml2-source-dir") + if source_dir + recipe.source_directory = source_dir + else + minor_version = Gem::Version.new(recipe.version).segments.take(2).join(".") + recipe.files = [{ + url: "#{gnome_source}/sources/libxml2/#{minor_version}/#{recipe.name}-#{recipe.version}.tar.xz", + sha256: dependencies["libxml2"]["sha256"], + }] + recipe.patch_files = Dir[File.join(PACKAGE_ROOT_DIR, "patches", "libxml2", "*.patch")].sort + end + + cppflags = concat_flags(ENV["CPPFLAGS"]) + cflags = concat_flags(ENV["CFLAGS"], "-O2", "-g") + + if cross_build_p + cppflags = concat_flags(cppflags, "-DNOKOGIRI_PRECOMPILED_LIBRARIES") + end + + if config_with_xml2_legacy? 
+ recipe.configure_options << "--with-legacy" + end + + if zlib_recipe + recipe.configure_options << "--with-zlib=#{zlib_recipe.path}" + end + + if libiconv_recipe + recipe.configure_options << "--with-iconv=#{libiconv_recipe.path}" + else + recipe.configure_options += iconv_configure_flags + end + + if darwin? && !cross_build_p + recipe.configure_options << "RANLIB=/usr/bin/ranlib" unless ENV.key?("RANLIB") + recipe.configure_options << "AR=/usr/bin/ar" unless ENV.key?("AR") + end + + if windows? + cflags = concat_flags(cflags, "-ULIBXML_STATIC", "-DIN_LIBXML") + end + + recipe.configure_options << if source_dir + "--config-cache" + else + "--disable-dependency-tracking" + end + + recipe.configure_options += [ + "--without-python", + "--without-readline", + "--with-c14n", + "--with-debug", + "--with-threads", + "CPPFLAGS=#{cppflags}", + "CFLAGS=#{cflags}", + ] + end + + libxslt_recipe = process_recipe("libxslt", dependencies["libxslt"]["version"], static_p, cross_build_p) do |recipe| + source_dir = arg_config("--with-xslt-source-dir") + if source_dir + recipe.source_directory = source_dir + else + minor_version = Gem::Version.new(recipe.version).segments.take(2).join(".") + recipe.files = [{ + url: "#{gnome_source}/sources/libxslt/#{minor_version}/#{recipe.name}-#{recipe.version}.tar.xz", + sha256: dependencies["libxslt"]["sha256"], + }] + recipe.patch_files = Dir[File.join(PACKAGE_ROOT_DIR, "patches", "libxslt", "*.patch")].sort + end + + cflags = concat_flags(ENV["CFLAGS"], "-O2", "-g") + + if darwin? && !cross_build_p + recipe.configure_options << "RANLIB=/usr/bin/ranlib" unless ENV.key?("RANLIB") + recipe.configure_options << "AR=/usr/bin/ar" unless ENV.key?("AR") + end + + if windows? + cflags = concat_flags(cflags, "-ULIBXSLT_STATIC", "-DIN_LIBXSLT") + cflags = concat_flags(cflags, "-ULIBEXSLT_STATIC", "-DIN_LIBEXSLT") + end + + recipe.configure_options << if source_dir + "--config-cache" + else + "--disable-dependency-tracking" + end + + recipe.configure_options += [ + "--without-python", + "--without-crypto", + "--with-debug", + "--with-libxml-prefix=#{sh_export_path(libxml2_recipe.path)}", + "CFLAGS=#{cflags}", + ] + end + + append_cppflags("-DNOKOGIRI_PACKAGED_LIBRARIES") + append_cppflags("-DNOKOGIRI_PRECOMPILED_LIBRARIES") if cross_build_p + + $libs = $libs.shellsplit.tap do |libs| + [libxml2_recipe, libxslt_recipe].each do |recipe| + libname = recipe.name[/\Alib(.+)\z/, 1] + config_basename = "#{libname}-config" + File.join(recipe.path, "bin", config_basename).tap do |config| + # call config scripts explicit with 'sh' for compat with Windows + cflags = %x(sh #{config} --cflags).strip + message("#{config_basename} cflags: #{cflags}\n") + $CPPFLAGS = concat_flags(cflags, $CPPFLAGS) # prepend + + %x(sh #{config} --libs).strip.shellsplit.each do |arg| + case arg + when /\A-L(.+)\z/ + # Prioritize ports' directories + $LIBPATH = if Regexp.last_match(1).start_with?(PACKAGE_ROOT_DIR + "/") + [Regexp.last_match(1)] | $LIBPATH + else + $LIBPATH | [Regexp.last_match(1)] + end + when /\A-l./ + libs.unshift(arg) + else + $LDFLAGS << " " << arg.shellescape + end + end + end + + patches_string = recipe.patch_files.map { |path| File.basename(path) }.join(" ") + append_cppflags(%[-DNOKOGIRI_#{recipe.name.upcase}_PATCHES="\\"#{patches_string}\\""]) + + case libname + when "xml2" + # xslt-config --libs or pkg-config libxslt --libs does not include + # -llzma, so we need to add it manually when linking statically. 
+ if static_p && preserving_globals { local_have_library("lzma") } + # Add it at the end; GH #988 + libs << "-llzma" + end + when "xslt" + # xslt-config does not have a flag to emit options including + # -lexslt, so add it manually. + libs.unshift("-lexslt") + end + end + end.shelljoin + + if static_p + static_archive_ld_flag = needs_darwin_linker_hack ? ["-load_hidden"] : [] + $libs = $libs.shellsplit.map do |arg| + case arg + when "-lxml2" + static_archive_ld_flag + [File.join(libxml2_recipe.path, "lib", libflag_to_filename(arg))] + when "-lxslt", "-lexslt" + static_archive_ld_flag + [File.join(libxslt_recipe.path, "lib", libflag_to_filename(arg))] + else + arg + end + end.flatten.shelljoin + end + + ensure_func("xmlParseDoc", "libxml/parser.h") + ensure_func("xsltParseStylesheetDoc", "libxslt/xslt.h") + ensure_func("exsltFuncRegister", "libexslt/exslt.h") +end + +if arg_config("--gumbo-dev") + message("DEV MODE ENABLED: build libgumbo as packaged source") + ext_dir = File.dirname(__FILE__) + Dir.chdir(ext_dir) do + $srcs = Dir["*.c", "../../gumbo-parser/src/*.c"] + $hdrs = Dir["*.h", "../../gumbo-parser/src/*.h"] + end + $INCFLAGS << " -I$(srcdir)/../../gumbo-parser/src" + $VPATH << "$(srcdir)/../../gumbo-parser/src" + find_header("nokogiri_gumbo.h") || abort("nokogiri_gumbo.h not found") +else + libgumbo_recipe = process_recipe("libgumbo", "1.0.0-nokogiri", static_p, cross_build_p, false) do |recipe| + recipe.configure_options = [] + + class << recipe + def downloaded? + true + end + + def extract + target = File.join(tmp_path, "gumbo-parser") + output("Copying gumbo-parser files into #{target}...") + FileUtils.mkdir_p(target) + FileUtils.cp(Dir.glob(File.join(PACKAGE_ROOT_DIR, "gumbo-parser/src/*")), target) + end + + def configured? + true + end + + def install + lib_dir = File.join(port_path, "lib") + inc_dir = File.join(port_path, "include") + FileUtils.mkdir_p([lib_dir, inc_dir]) + FileUtils.cp(File.join(work_path, "libgumbo.a"), lib_dir) + FileUtils.cp(Dir.glob(File.join(work_path, "*.h")), inc_dir) + end + + def compile + cflags = concat_flags(ENV["CFLAGS"], "-fPIC", "-O2", "-g") + + env = { "CC" => gcc_cmd, "CFLAGS" => cflags } + if config_cross_build? + if host.include?("darwin") + env["AR"] = "#{host}-libtool" + env["ARFLAGS"] = "-o" + else + env["AR"] = "#{host}-ar" + end + env["RANLIB"] = "#{host}-ranlib" + if windows? + concat_flags(env["CFLAGS"], "-D_RUBY_UCRT") + end + end + + execute("compile", make_cmd, { env: env }) + end + end + end + append_cppflags("-I#{File.join(libgumbo_recipe.path, "include")}") + $libs = $libs + " " + File.join(libgumbo_recipe.path, "lib", "libgumbo.a") + $LIBPATH = $LIBPATH | [File.join(libgumbo_recipe.path, "lib")] + ensure_func("gumbo_parse_with_options", "nokogiri_gumbo.h") +end + +have_func("xmlCtxtSetOptions") # introduced in libxml2 2.13.0 +have_func("xmlCtxtGetOptions") # introduced in libxml2 2.14.0 +have_func("xmlSwitchEncodingName") # introduced in libxml2 2.13.0 +have_func("rb_category_warning") # introduced in Ruby 3.0 but had trouble resolving this symbol in truffleruby + +other_library_versions_string = OTHER_LIBRARY_VERSIONS.map { |k, v| [k, v].join(":") }.join(",") +append_cppflags(%[-DNOKOGIRI_OTHER_LIBRARY_VERSIONS="\\"#{other_library_versions_string}\\""]) + +unless config_system_libraries? 
+ if cross_build_p + # When precompiling native gems, copy packaged libraries' headers to ext/nokogiri/include + # These are packaged up by the cross-compiling callback in the ExtensionTask + copy_packaged_libraries_headers( + to_path: File.join(PACKAGE_ROOT_DIR, "ext/nokogiri/include"), + from_recipes: [libxml2_recipe, libxslt_recipe], + ) + else + # When compiling during installation, install packaged libraries' header files into ext/nokogiri/include + copy_packaged_libraries_headers( + to_path: "include", + from_recipes: [libxml2_recipe, libxslt_recipe], + ) + $INSTALLFILES << ["include/**/*.h", "$(rubylibdir)"] + end +end + +create_makefile("nokogiri/nokogiri") + +if config_clean? + # Do not clean if run in a development work tree. + File.open("Makefile", "at") do |mk| + mk.print(<<~EOF) + + all: clean-ports + clean-ports: $(TARGET_SO) + \t-$(Q)$(RUBY) $(srcdir)/extconf.rb --clean --#{static_p ? "enable" : "disable"}-static + EOF + end +end + +# rubocop:enable Style/GlobalVars diff --git a/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/gumbo.c b/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/gumbo.c new file mode 100644 index 000000000..fd938f3c0 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/gumbo.c @@ -0,0 +1,610 @@ +// +// Copyright 2013-2021 Sam Ruby, Stephen Checkoway +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// + +// +// nokogumbo.c defines the following: +// +// class Nokogumbo +// def parse(utf8_string) # returns Nokogiri::HTML5::Document +// end +// +// Processing starts by calling gumbo_parse_with_options. The resulting document tree +// is then walked, a parallel libxml2 tree is constructed, and the final document is +// then wrapped using noko_xml_document_wrap. This approach reduces memory and CPU +// requirements as Ruby objects are only built when necessary. +// + +#include + +#include "nokogiri_gumbo.h" + +VALUE cNokogiriHtml5Document; + +// Interned symbols +static ID internal_subset; +static ID parent; + +#include +#include +#include + +// URI = system id +// external id = public id +static xmlDocPtr +new_html_doc(const char *dtd_name, const char *system, const char *public) +{ + // These two libxml2 functions take the public and system ids in + // opposite orders. 
+ htmlDocPtr doc = htmlNewDocNoDtD(/* URI */ NULL, /* ExternalID */NULL); + assert(doc); + if (dtd_name) { + xmlCreateIntSubset(doc, (const xmlChar *)dtd_name, (const xmlChar *)public, (const xmlChar *)system); + } + return doc; +} + +static xmlNodePtr +get_parent(xmlNodePtr node) +{ + return node->parent; +} + +static GumboOutput * +perform_parse(const GumboOptions *options, VALUE input) +{ + assert(RTEST(input)); + Check_Type(input, T_STRING); + GumboOutput *output = gumbo_parse_with_options( + options, + RSTRING_PTR(input), + (size_t)RSTRING_LEN(input) + ); + + const char *status_string = gumbo_status_to_string(output->status); + switch (output->status) { + case GUMBO_STATUS_OK: + break; + case GUMBO_STATUS_TOO_MANY_ATTRIBUTES: + case GUMBO_STATUS_TREE_TOO_DEEP: + gumbo_destroy_output(output); + rb_raise(rb_eArgError, "%s", status_string); + case GUMBO_STATUS_OUT_OF_MEMORY: + gumbo_destroy_output(output); + rb_raise(rb_eNoMemError, "%s", status_string); + } + return output; +} + +static xmlNsPtr +lookup_or_add_ns( + xmlDocPtr doc, + xmlNodePtr root, + const char *href, + const char *prefix +) +{ + xmlNsPtr ns = xmlSearchNs(doc, root, (const xmlChar *)prefix); + if (ns) { + return ns; + } + return xmlNewNs(root, (const xmlChar *)href, (const xmlChar *)prefix); +} + +static void +set_line(xmlNodePtr node, size_t line) +{ + // libxml2 uses 65535 to mean look elsewhere for the line number on some + // nodes. + if (line < 65535) { + node->line = (unsigned short)line; + } +} + +// Construct an XML tree rooted at xml_output_node from the Gumbo tree rooted +// at gumbo_node. +static void +build_tree( + xmlDocPtr doc, + xmlNodePtr xml_output_node, + const GumboNode *gumbo_node +) +{ + xmlNodePtr xml_root = NULL; + xmlNodePtr xml_node = xml_output_node; + size_t child_index = 0; + + while (true) { + assert(gumbo_node != NULL); + const GumboVector *children = gumbo_node->type == GUMBO_NODE_DOCUMENT ? + &gumbo_node->v.document.children : &gumbo_node->v.element.children; + if (child_index >= children->length) { + // Move up the tree and to the next child. + if (xml_node == xml_output_node) { + // We've built as much of the tree as we can. + return; + } + child_index = gumbo_node->index_within_parent + 1; + gumbo_node = gumbo_node->parent; + xml_node = get_parent(xml_node); + // Children of fragments don't share the same root, so reset it and + // it'll be set below. In the non-fragment case, this will only happen + // after the html element has been finished at which point there are no + // further elements. + if (xml_node == xml_output_node) { + xml_root = NULL; + } + continue; + } + const GumboNode *gumbo_child = children->data[child_index++]; + xmlNodePtr xml_child; + + switch (gumbo_child->type) { + case GUMBO_NODE_DOCUMENT: + abort(); // Bug in Gumbo. 
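+
+      // The remaining cases mirror each Gumbo node onto the corresponding libxml2
+      // constructor (xmlNewDocText, xmlNewCDataBlock, xmlNewDocComment, xmlNewDocNode),
+      // attach it under the current xml_node, and carry the source position and
+      // namespace information across.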
+ + case GUMBO_NODE_TEXT: + case GUMBO_NODE_WHITESPACE: + xml_child = xmlNewDocText(doc, (const xmlChar *)gumbo_child->v.text.text); + set_line(xml_child, gumbo_child->v.text.start_pos.line); + xmlAddChild(xml_node, xml_child); + break; + + case GUMBO_NODE_CDATA: + xml_child = xmlNewCDataBlock(doc, (const xmlChar *)gumbo_child->v.text.text, + (int) strlen(gumbo_child->v.text.text)); + set_line(xml_child, gumbo_child->v.text.start_pos.line); + xmlAddChild(xml_node, xml_child); + break; + + case GUMBO_NODE_COMMENT: + xml_child = xmlNewDocComment(doc, (const xmlChar *)gumbo_child->v.text.text); + set_line(xml_child, gumbo_child->v.text.start_pos.line); + xmlAddChild(xml_node, xml_child); + break; + + case GUMBO_NODE_TEMPLATE: + // XXX: Should create a template element and a new DocumentFragment + case GUMBO_NODE_ELEMENT: { + xml_child = xmlNewDocNode(doc, NULL, (const xmlChar *)gumbo_child->v.element.name, NULL); + set_line(xml_child, gumbo_child->v.element.start_pos.line); + if (xml_root == NULL) { + xml_root = xml_child; + } + xmlNsPtr ns = NULL; + switch (gumbo_child->v.element.tag_namespace) { + case GUMBO_NAMESPACE_HTML: + break; + case GUMBO_NAMESPACE_SVG: + ns = lookup_or_add_ns(doc, xml_root, "http://www.w3.org/2000/svg", "svg"); + break; + case GUMBO_NAMESPACE_MATHML: + ns = lookup_or_add_ns(doc, xml_root, "http://www.w3.org/1998/Math/MathML", "math"); + break; + } + if (ns != NULL) { + xmlSetNs(xml_child, ns); + } + xmlAddChild(xml_node, xml_child); + + // Add the attributes. + const GumboVector *attrs = &gumbo_child->v.element.attributes; + for (size_t i = 0; i < attrs->length; i++) { + const GumboAttribute *attr = attrs->data[i]; + + switch (attr->attr_namespace) { + case GUMBO_ATTR_NAMESPACE_XLINK: + ns = lookup_or_add_ns(doc, xml_root, "http://www.w3.org/1999/xlink", "xlink"); + break; + + case GUMBO_ATTR_NAMESPACE_XML: + ns = lookup_or_add_ns(doc, xml_root, "http://www.w3.org/XML/1998/namespace", "xml"); + break; + + case GUMBO_ATTR_NAMESPACE_XMLNS: + ns = lookup_or_add_ns(doc, xml_root, "http://www.w3.org/2000/xmlns/", "xmlns"); + break; + + default: + ns = NULL; + } + xmlNewNsProp(xml_child, ns, (const xmlChar *)attr->name, (const xmlChar *)attr->value); + } + + // Add children for this element. + child_index = 0; + gumbo_node = gumbo_child; + xml_node = xml_child; + } + } + } +} + +static void +add_errors(const GumboOutput *output, VALUE rdoc, VALUE input, VALUE url) +{ + const char *input_str = RSTRING_PTR(input); + size_t input_len = (size_t)RSTRING_LEN(input); + + // Add parse errors to rdoc. + if (output->errors.length) { + const GumboVector *errors = &output->errors; + VALUE rerrors = rb_ary_new2(errors->length); + + for (size_t i = 0; i < errors->length; i++) { + GumboError *err = errors->data[i]; + GumboSourcePosition position = gumbo_error_position(err); + char *msg; + size_t size = gumbo_caret_diagnostic_to_string(err, input_str, input_len, &msg); + VALUE err_str = rb_utf8_str_new(msg, (int)size); + free(msg); + VALUE syntax_error = rb_class_new_instance(1, &err_str, cNokogiriXmlSyntaxError); + const char *error_code = gumbo_error_code(err); + VALUE str1 = error_code ? 
rb_utf8_str_new_static(error_code, (int)strlen(error_code)) : Qnil; + rb_iv_set(syntax_error, "@domain", INT2NUM(1)); // XML_FROM_PARSER + rb_iv_set(syntax_error, "@code", INT2NUM(1)); // XML_ERR_INTERNAL_ERROR + rb_iv_set(syntax_error, "@level", INT2NUM(2)); // XML_ERR_ERROR + rb_iv_set(syntax_error, "@file", url); + rb_iv_set(syntax_error, "@line", SIZET2NUM(position.line)); + rb_iv_set(syntax_error, "@str1", str1); + rb_iv_set(syntax_error, "@str2", Qnil); + rb_iv_set(syntax_error, "@str3", Qnil); + rb_iv_set(syntax_error, "@int1", INT2NUM(0)); + rb_iv_set(syntax_error, "@column", SIZET2NUM(position.column)); + rb_ary_push(rerrors, syntax_error); + } + rb_iv_set(rdoc, "@errors", rerrors); + } +} + +typedef struct { + GumboOutput *output; + VALUE input; + VALUE url_or_frag; + VALUE klass; + xmlDocPtr doc; +} ParseArgs; + +static VALUE +parse_cleanup(VALUE parse_args) +{ + ParseArgs *args = (ParseArgs *)parse_args; + gumbo_destroy_output(args->output); + // Make sure garbage collection doesn't mark the objects as being live based + // on references from the ParseArgs. This may be unnecessary. + args->input = Qnil; + args->url_or_frag = Qnil; + if (args->doc != NULL) { + xmlFreeDoc(args->doc); + } + return Qnil; +} + +// Scan the keyword arguments for options common to the document and fragment +// parse. +static GumboOptions +common_options(VALUE kwargs) +{ + // The order of the keywords determines the order of the values below. + // If this order is changed, then setting the options below must change as + // well. + ID keywords[] = { + // Required keywords. + rb_intern_const("max_attributes"), + rb_intern_const("max_errors"), + rb_intern_const("max_tree_depth"), + + // Optional keywords. + rb_intern_const("parse_noscript_content_as_text"), + }; + VALUE values[sizeof keywords / sizeof keywords[0]]; + + // Extract the values coresponding to the required keywords. Raise an error + // if required arguments are missing. + rb_get_kwargs(kwargs, keywords, 3, 1, values); + + GumboOptions options = kGumboDefaultOptions; + options.max_attributes = NUM2INT(values[0]); + options.max_errors = NUM2INT(values[1]); + + // handle negative values + int depth = NUM2INT(values[2]); + options.max_tree_depth = depth < 0 ? UINT_MAX : (unsigned int)depth; + + options.parse_noscript_content_as_text = values[3] != Qundef && RTEST(values[3]); + + return options; +} + +static VALUE parse_continue(VALUE parse_args); + +/* + * @!visibility protected + */ +static VALUE +noko_gumbo_s_parse(int argc, VALUE *argv, VALUE _self) +{ + VALUE input, url, klass, kwargs; + + rb_scan_args(argc, argv, "3:", &input, &url, &klass, &kwargs); + if (NIL_P(kwargs)) { + kwargs = rb_hash_new(); + } + + GumboOptions options = common_options(kwargs); + + GumboOutput *output = perform_parse(&options, input); + ParseArgs args = { + .output = output, + .input = input, + .url_or_frag = url, + .klass = klass, + .doc = NULL, + }; + + return rb_ensure(parse_continue, (VALUE)(&args), parse_cleanup, (VALUE)(&args)); +} + +static VALUE +parse_continue(VALUE parse_args) +{ + ParseArgs *args = (ParseArgs *)parse_args; + GumboOutput *output = args->output; + xmlDocPtr doc; + if (output->document->v.document.has_doctype) { + const char *name = output->document->v.document.name; + const char *public = output->document->v.document.public_identifier; + const char *system = output->document->v.document.system_identifier; + public = public[0] ? public : NULL; + system = system[0] ? 
system : NULL; + doc = new_html_doc(name, system, public); + } else { + doc = new_html_doc(NULL, NULL, NULL); + } + args->doc = doc; // Make sure doc gets cleaned up if an error is thrown. + build_tree(doc, (xmlNodePtr)doc, output->document); + VALUE rdoc = noko_xml_document_wrap(args->klass, doc); + rb_iv_set(rdoc, "@url", args->url_or_frag); + rb_iv_set(rdoc, "@quirks_mode", INT2NUM(output->document->v.document.doc_type_quirks_mode)); + args->doc = NULL; // The Ruby runtime now owns doc so don't delete it. + add_errors(output, rdoc, args->input, args->url_or_frag); + return rdoc; +} + +static int +lookup_namespace(VALUE node, bool require_known_ns) +{ + ID namespace, href; + CONST_ID(namespace, "namespace"); + CONST_ID(href, "href"); + VALUE ns = rb_funcall(node, namespace, 0); + + if (NIL_P(ns)) { + return GUMBO_NAMESPACE_HTML; + } + ns = rb_funcall(ns, href, 0); + assert(RTEST(ns)); + Check_Type(ns, T_STRING); + + const char *href_ptr = RSTRING_PTR(ns); + size_t href_len = (size_t)RSTRING_LEN(ns); +#define NAMESPACE_P(uri) (href_len == sizeof uri - 1 && !memcmp(href_ptr, uri, href_len)) + if (NAMESPACE_P("http://www.w3.org/1999/xhtml")) { + return GUMBO_NAMESPACE_HTML; + } + if (NAMESPACE_P("http://www.w3.org/1998/Math/MathML")) { + return GUMBO_NAMESPACE_MATHML; + } + if (NAMESPACE_P("http://www.w3.org/2000/svg")) { + return GUMBO_NAMESPACE_SVG; + } +#undef NAMESPACE_P + if (require_known_ns) { + rb_raise(rb_eArgError, "Unexpected namespace URI \"%*s\"", (int)href_len, href_ptr); + } + return -1; +} + +static xmlNodePtr +extract_xml_node(VALUE node) +{ + xmlNodePtr xml_node; + Noko_Node_Get_Struct(node, xmlNode, xml_node); + return xml_node; +} + +static VALUE fragment_continue(VALUE parse_args); + +/* + * @!visibility protected + */ +static VALUE +noko_gumbo_s_fragment(int argc, VALUE *argv, VALUE _self) +{ + VALUE doc_fragment; + VALUE tags; + VALUE ctx; + VALUE kwargs; + ID name = rb_intern_const("name"); + const char *ctx_tag; + GumboNamespaceEnum ctx_ns; + GumboQuirksModeEnum quirks_mode; + bool form = false; + const char *encoding = NULL; + + rb_scan_args(argc, argv, "3:", &doc_fragment, &tags, &ctx, &kwargs); + if (NIL_P(kwargs)) { + kwargs = rb_hash_new(); + } + + GumboOptions options = common_options(kwargs); + + if (NIL_P(ctx)) { + ctx_tag = "body"; + ctx_ns = GUMBO_NAMESPACE_HTML; + } else if (TYPE(ctx) == T_STRING) { + ctx_tag = StringValueCStr(ctx); + ctx_ns = GUMBO_NAMESPACE_HTML; + size_t len = (size_t)RSTRING_LEN(ctx); + const char *colon = memchr(ctx_tag, ':', len); + if (colon) { + switch (colon - ctx_tag) { + case 3: + if (st_strncasecmp(ctx_tag, "svg", 3) != 0) { + goto error; + } + ctx_ns = GUMBO_NAMESPACE_SVG; + break; + case 4: + if (st_strncasecmp(ctx_tag, "html", 4) == 0) { + ctx_ns = GUMBO_NAMESPACE_HTML; + } else if (st_strncasecmp(ctx_tag, "math", 4) == 0) { + ctx_ns = GUMBO_NAMESPACE_MATHML; + } else { + goto error; + } + break; + default: +error: + rb_raise(rb_eArgError, "Invalid context namespace '%*s'", (int)(colon - ctx_tag), ctx_tag); + } + ctx_tag = colon + 1; + } else { + // For convenience, put 'svg' and 'math' in their namespaces. + if (len == 3 && st_strncasecmp(ctx_tag, "svg", 3) == 0) { + ctx_ns = GUMBO_NAMESPACE_SVG; + } else if (len == 4 && st_strncasecmp(ctx_tag, "math", 4) == 0) { + ctx_ns = GUMBO_NAMESPACE_MATHML; + } + } + + // Check if it's a form. + form = ctx_ns == GUMBO_NAMESPACE_HTML && st_strcasecmp(ctx_tag, "form") == 0; + } else { + ID element_ = rb_intern_const("element?"); + + // Context fragment name. 
+ VALUE tag_name = rb_funcall(ctx, name, 0); + assert(RTEST(tag_name)); + Check_Type(tag_name, T_STRING); + ctx_tag = StringValueCStr(tag_name); + + // Context fragment namespace. + ctx_ns = lookup_namespace(ctx, true); + + // Check for a form ancestor, including self. + for (VALUE node = ctx; + !NIL_P(node); + node = rb_respond_to(node, parent) ? rb_funcall(node, parent, 0) : Qnil) { + if (!RTEST(rb_funcall(node, element_, 0))) { + continue; + } + VALUE element_name = rb_funcall(node, name, 0); + if (RSTRING_LEN(element_name) == 4 + && !st_strcasecmp(RSTRING_PTR(element_name), "form") + && lookup_namespace(node, false) == GUMBO_NAMESPACE_HTML) { + form = true; + break; + } + } + + // Encoding. + if (ctx_ns == GUMBO_NAMESPACE_MATHML + && RSTRING_LEN(tag_name) == 14 + && !st_strcasecmp(ctx_tag, "annotation-xml")) { + VALUE enc = rb_funcall(ctx, rb_intern_const("[]"), + 1, + rb_utf8_str_new_static("encoding", 8)); + if (RTEST(enc)) { + Check_Type(enc, T_STRING); + encoding = StringValueCStr(enc); + } + } + } + + // Quirks mode. + VALUE doc = rb_funcall(doc_fragment, rb_intern_const("document"), 0); + VALUE dtd = rb_funcall(doc, internal_subset, 0); + VALUE doc_quirks_mode = rb_iv_get(doc, "@quirks_mode"); + if (NIL_P(ctx) || (TYPE(ctx) == T_STRING) || NIL_P(doc_quirks_mode)) { + quirks_mode = GUMBO_DOCTYPE_NO_QUIRKS; + } else if (NIL_P(dtd)) { + quirks_mode = GUMBO_DOCTYPE_QUIRKS; + } else { + VALUE dtd_name = rb_funcall(dtd, name, 0); + VALUE pubid = rb_funcall(dtd, rb_intern_const("external_id"), 0); + VALUE sysid = rb_funcall(dtd, rb_intern_const("system_id"), 0); + quirks_mode = gumbo_compute_quirks_mode( + NIL_P(dtd_name) ? NULL : StringValueCStr(dtd_name), + NIL_P(pubid) ? NULL : StringValueCStr(pubid), + NIL_P(sysid) ? NULL : StringValueCStr(sysid) + ); + } + + // Perform a fragment parse. + options.fragment_context = ctx_tag; + options.fragment_namespace = ctx_ns; + options.fragment_encoding = encoding; + options.quirks_mode = quirks_mode; + options.fragment_context_has_form_ancestor = form; + + // Add one to the max tree depth to account for the HTML element. + if (options.max_tree_depth < UINT_MAX) { options.max_tree_depth++; } + + GumboOutput *output = perform_parse(&options, tags); + ParseArgs args = { + .output = output, + .input = tags, + .url_or_frag = doc_fragment, + .doc = (xmlDocPtr)extract_xml_node(doc), + }; + rb_ensure(fragment_continue, (VALUE)(&args), parse_cleanup, (VALUE)(&args)); + return Qnil; +} + +static VALUE +fragment_continue(VALUE parse_args) +{ + ParseArgs *args = (ParseArgs *)parse_args; + GumboOutput *output = args->output; + VALUE doc_fragment = args->url_or_frag; + xmlDocPtr xml_doc = args->doc; + + args->doc = NULL; // The Ruby runtime owns doc so make sure we don't delete it. + xmlNodePtr xml_frag = extract_xml_node(doc_fragment); + build_tree(xml_doc, xml_frag, output->root); + rb_iv_set(doc_fragment, "@quirks_mode", INT2NUM(output->document->v.document.doc_type_quirks_mode)); + add_errors(output, doc_fragment, args->input, rb_utf8_str_new_static("#fragment", 9)); + return Qnil; +} + +// Initialize the Nokogumbo class and fetch constants we will use later. +void +noko_init_gumbo(void) +{ + // Class constants. + cNokogiriHtml5Document = rb_define_class_under(mNokogiriHtml5, "Document", cNokogiriHtml4Document); + rb_gc_register_mark_object(cNokogiriHtml5Document); + + // Interned symbols. + internal_subset = rb_intern_const("internal_subset"); + parent = rb_intern_const("parent"); + + // Define Nokogumbo module with parse and fragment methods. 
+ rb_define_singleton_method(mNokogiriGumbo, "parse", noko_gumbo_s_parse, -1); + rb_define_singleton_method(mNokogiriGumbo, "fragment", noko_gumbo_s_fragment, -1); +} + +// vim: set shiftwidth=2 softtabstop=2 tabstop=8 expandtab: diff --git a/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/html4_document.c b/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/html4_document.c new file mode 100644 index 000000000..e3e0ee084 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/html4_document.c @@ -0,0 +1,171 @@ +#include + +VALUE cNokogiriHtml4Document ; + +static ID id_encoding_found; +static ID id_to_s; + +/* + * call-seq: + * new(uri=nil, external_id=nil) → HTML4::Document + * + * Create a new empty document with base URI +uri+ and external ID +external_id+. + */ +static VALUE +rb_html_document_s_new(int argc, VALUE *argv, VALUE klass) +{ + VALUE uri, external_id, rest, rb_doc; + htmlDocPtr doc; + + rb_scan_args(argc, argv, "0*", &rest); + uri = rb_ary_entry(rest, (long)0); + external_id = rb_ary_entry(rest, (long)1); + + doc = htmlNewDoc( + RTEST(uri) ? (const xmlChar *)StringValueCStr(uri) : NULL, + RTEST(external_id) ? (const xmlChar *)StringValueCStr(external_id) : NULL + ); + rb_doc = noko_xml_document_wrap_with_init_args(klass, doc, argc, argv); + return rb_doc ; +} + +/* + * call-seq: + * read_io(io, url, encoding, options) + * + * Read the HTML document from +io+ with given +url+, +encoding+, + * and +options+. See Nokogiri::HTML4.parse + */ +static VALUE +rb_html_document_s_read_io(VALUE klass, VALUE rb_io, VALUE rb_url, VALUE rb_encoding, VALUE rb_options) +{ + VALUE rb_doc; + VALUE rb_error_list = rb_ary_new(); + htmlDocPtr c_doc; + const char *c_url = NIL_P(rb_url) ? NULL : StringValueCStr(rb_url); + const char *c_encoding = NIL_P(rb_encoding) ? NULL : StringValueCStr(rb_encoding); + int options = NUM2INT(rb_options); + + xmlSetStructuredErrorFunc((void *)rb_error_list, noko__error_array_pusher); + + c_doc = htmlReadIO(noko_io_read, noko_io_close, (void *)rb_io, c_url, c_encoding, options); + + xmlSetStructuredErrorFunc(NULL, NULL); + + /* + * If EncodingFound has occurred in EncodingReader, make sure to do + * a cleanup and propagate the error. + */ + if (rb_respond_to(rb_io, id_encoding_found)) { + VALUE encoding_found = rb_funcall(rb_io, id_encoding_found, 0); + if (!NIL_P(encoding_found)) { + xmlFreeDoc(c_doc); + rb_exc_raise(encoding_found); + } + } + + if ((c_doc == NULL) || (!(options & XML_PARSE_RECOVER) && (RARRAY_LEN(rb_error_list) > 0))) { + VALUE rb_error ; + + xmlFreeDoc(c_doc); + + rb_error = rb_ary_entry(rb_error_list, 0); + if (rb_error == Qnil) { + rb_raise(rb_eRuntimeError, "Could not parse document"); + } else { + VALUE exception_message = rb_funcall(rb_error, id_to_s, 0); + exception_message = rb_str_concat(rb_str_new2("Parser without recover option encountered error or warning: "), + exception_message); + rb_exc_raise(rb_class_new_instance(1, &exception_message, cNokogiriXmlSyntaxError)); + } + + return Qnil; + } + + rb_doc = noko_xml_document_wrap(klass, c_doc); + rb_iv_set(rb_doc, "@errors", rb_error_list); + return rb_doc; +} + +/* + * call-seq: + * read_memory(string, url, encoding, options) + * + * Read the HTML document contained in +string+ with given +url+, +encoding+, + * and +options+. 
See Nokogiri::HTML4.parse + */ +static VALUE +rb_html_document_s_read_memory(VALUE klass, VALUE rb_html, VALUE rb_url, VALUE rb_encoding, VALUE rb_options) +{ + VALUE rb_doc; + VALUE rb_error_list = rb_ary_new(); + htmlDocPtr c_doc; + const char *c_buffer = StringValuePtr(rb_html); + const char *c_url = NIL_P(rb_url) ? NULL : StringValueCStr(rb_url); + const char *c_encoding = NIL_P(rb_encoding) ? NULL : StringValueCStr(rb_encoding); + int html_len = (int)RSTRING_LEN(rb_html); + int options = NUM2INT(rb_options); + + xmlSetStructuredErrorFunc((void *)rb_error_list, noko__error_array_pusher); + + c_doc = htmlReadMemory(c_buffer, html_len, c_url, c_encoding, options); + + xmlSetStructuredErrorFunc(NULL, NULL); + + if ((c_doc == NULL) || (!(options & XML_PARSE_RECOVER) && (RARRAY_LEN(rb_error_list) > 0))) { + VALUE rb_error ; + + xmlFreeDoc(c_doc); + + rb_error = rb_ary_entry(rb_error_list, 0); + if (rb_error == Qnil) { + rb_raise(rb_eRuntimeError, "Could not parse document"); + } else { + VALUE exception_message = rb_funcall(rb_error, id_to_s, 0); + exception_message = rb_str_concat(rb_str_new2("Parser without recover option encountered error or warning: "), + exception_message); + rb_exc_raise(rb_class_new_instance(1, &exception_message, cNokogiriXmlSyntaxError)); + } + + return Qnil; + } + + rb_doc = noko_xml_document_wrap(klass, c_doc); + rb_iv_set(rb_doc, "@errors", rb_error_list); + return rb_doc; +} + +/* + * call-seq: + * type + * + * The type for this document + */ +static VALUE +rb_html_document_type(VALUE self) +{ + htmlDocPtr doc = noko_xml_document_unwrap(self); + return INT2NUM(doc->type); +} + +void +noko_init_html_document(void) +{ + /* this is here so that rdoc doesn't ignore this file. */ + /* + mNokogiri = rb_define_module("Nokogiri"); + mNokogiriHtml4 = rb_define_module_under(mNokogiri, "HTML4"); + */ + + assert(cNokogiriXmlDocument); + cNokogiriHtml4Document = rb_define_class_under(mNokogiriHtml4, "Document", cNokogiriXmlDocument); + + rb_define_singleton_method(cNokogiriHtml4Document, "read_memory", rb_html_document_s_read_memory, 4); + rb_define_singleton_method(cNokogiriHtml4Document, "read_io", rb_html_document_s_read_io, 4); + rb_define_singleton_method(cNokogiriHtml4Document, "new", rb_html_document_s_new, -1); + + rb_define_method(cNokogiriHtml4Document, "type", rb_html_document_type, 0); + + id_encoding_found = rb_intern("encoding_found"); + id_to_s = rb_intern("to_s"); +} diff --git a/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/html4_element_description.c b/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/html4_element_description.c new file mode 100644 index 000000000..bd345d1a2 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/html4_element_description.c @@ -0,0 +1,299 @@ +#include + +static const rb_data_type_t html_elem_desc_type = { + .wrap_struct_name = "htmlElemDesc", + .flags = RUBY_TYPED_FREE_IMMEDIATELY | RUBY_TYPED_WB_PROTECTED, +}; + +VALUE cNokogiriHtml4ElementDescription ; + +/* + * call-seq: + * required_attributes + * + * A list of required attributes for this element + */ +static VALUE +required_attributes(VALUE self) +{ + const htmlElemDesc *description; + VALUE list; + int i; + + TypedData_Get_Struct(self, htmlElemDesc, &html_elem_desc_type, description); + + list = rb_ary_new(); + + if (NULL == description->attrs_req) { return list; } + + for (i = 0; description->attrs_depr[i]; i++) { + rb_ary_push(list, 
NOKOGIRI_STR_NEW2(description->attrs_req[i])); + } + + return list; +} + +/* + * call-seq: + * deprecated_attributes + * + * A list of deprecated attributes for this element + */ +static VALUE +deprecated_attributes(VALUE self) +{ + const htmlElemDesc *description; + VALUE list; + int i; + + TypedData_Get_Struct(self, htmlElemDesc, &html_elem_desc_type, description); + + list = rb_ary_new(); + + if (NULL == description->attrs_depr) { return list; } + + for (i = 0; description->attrs_depr[i]; i++) { + rb_ary_push(list, NOKOGIRI_STR_NEW2(description->attrs_depr[i])); + } + + return list; +} + +/* + * call-seq: + * optional_attributes + * + * A list of optional attributes for this element + */ +static VALUE +optional_attributes(VALUE self) +{ + const htmlElemDesc *description; + VALUE list; + int i; + + TypedData_Get_Struct(self, htmlElemDesc, &html_elem_desc_type, description); + + list = rb_ary_new(); + + if (NULL == description->attrs_opt) { return list; } + + for (i = 0; description->attrs_opt[i]; i++) { + rb_ary_push(list, NOKOGIRI_STR_NEW2(description->attrs_opt[i])); + } + + return list; +} + +/* + * call-seq: + * default_sub_element + * + * The default sub element for this element + */ +static VALUE +default_sub_element(VALUE self) +{ + const htmlElemDesc *description; + TypedData_Get_Struct(self, htmlElemDesc, &html_elem_desc_type, description); + + if (description->defaultsubelt) { + return NOKOGIRI_STR_NEW2(description->defaultsubelt); + } + + return Qnil; +} + +/* + * call-seq: + * sub_elements + * + * A list of allowed sub elements for this element. + */ +static VALUE +sub_elements(VALUE self) +{ + const htmlElemDesc *description; + VALUE list; + int i; + + TypedData_Get_Struct(self, htmlElemDesc, &html_elem_desc_type, description); + + list = rb_ary_new(); + + if (NULL == description->subelts) { return list; } + + for (i = 0; description->subelts[i]; i++) { + rb_ary_push(list, NOKOGIRI_STR_NEW2(description->subelts[i])); + } + + return list; +} + +/* + * call-seq: + * description + * + * The description for this element + */ +static VALUE +description(VALUE self) +{ + const htmlElemDesc *description; + TypedData_Get_Struct(self, htmlElemDesc, &html_elem_desc_type, description); + + return NOKOGIRI_STR_NEW2(description->desc); +} + +/* + * call-seq: + * inline? + * + * Is this element an inline element? + */ +static VALUE +inline_eh(VALUE self) +{ + const htmlElemDesc *description; + TypedData_Get_Struct(self, htmlElemDesc, &html_elem_desc_type, description); + + if (description->isinline) { return Qtrue; } + return Qfalse; +} + +/* + * call-seq: + * deprecated? + * + * Is this element deprecated? + */ +static VALUE +deprecated_eh(VALUE self) +{ + const htmlElemDesc *description; + TypedData_Get_Struct(self, htmlElemDesc, &html_elem_desc_type, description); + + if (description->depr) { return Qtrue; } + return Qfalse; +} + +/* + * call-seq: + * empty? + * + * Is this an empty element? + */ +static VALUE +empty_eh(VALUE self) +{ + const htmlElemDesc *description; + TypedData_Get_Struct(self, htmlElemDesc, &html_elem_desc_type, description); + + if (description->empty) { return Qtrue; } + return Qfalse; +} + +/* + * call-seq: + * save_end_tag? + * + * Should the end tag be saved? + */ +static VALUE +save_end_tag_eh(VALUE self) +{ + const htmlElemDesc *description; + TypedData_Get_Struct(self, htmlElemDesc, &html_elem_desc_type, description); + + if (description->saveEndTag) { return Qtrue; } + return Qfalse; +} + +/* + * call-seq: + * implied_end_tag? 
+ * + * Can the end tag be implied for this tag? + */ +static VALUE +implied_end_tag_eh(VALUE self) +{ + const htmlElemDesc *description; + TypedData_Get_Struct(self, htmlElemDesc, &html_elem_desc_type, description); + + if (description->endTag) { return Qtrue; } + return Qfalse; +} + +/* + * call-seq: + * implied_start_tag? + * + * Can the start tag be implied for this tag? + */ +static VALUE +implied_start_tag_eh(VALUE self) +{ + const htmlElemDesc *description; + TypedData_Get_Struct(self, htmlElemDesc, &html_elem_desc_type, description); + + if (description->startTag) { return Qtrue; } + return Qfalse; +} + +/* + * call-seq: + * name + * + * Get the tag name for this ElementDescription + */ +static VALUE +name(VALUE self) +{ + const htmlElemDesc *description; + TypedData_Get_Struct(self, htmlElemDesc, &html_elem_desc_type, description); + + if (NULL == description->name) { return Qnil; } + return NOKOGIRI_STR_NEW2(description->name); +} + +/* + * call-seq: + * [](tag_name) + * + * Get ElementDescription for +tag_name+ + */ +static VALUE +get_description(VALUE klass, VALUE tag_name) +{ + const htmlElemDesc *description = htmlTagLookup( + (const xmlChar *)StringValueCStr(tag_name) + ); + + if (NULL == description) { return Qnil; } + return TypedData_Wrap_Struct(klass, &html_elem_desc_type, DISCARD_CONST_QUAL(void *, description)); +} + +void +noko_init_html_element_description(void) +{ + cNokogiriHtml4ElementDescription = rb_define_class_under(mNokogiriHtml4, "ElementDescription", rb_cObject); + + rb_undef_alloc_func(cNokogiriHtml4ElementDescription); + + rb_define_singleton_method(cNokogiriHtml4ElementDescription, "[]", get_description, 1); + + rb_define_method(cNokogiriHtml4ElementDescription, "name", name, 0); + rb_define_method(cNokogiriHtml4ElementDescription, "implied_start_tag?", implied_start_tag_eh, 0); + rb_define_method(cNokogiriHtml4ElementDescription, "implied_end_tag?", implied_end_tag_eh, 0); + rb_define_method(cNokogiriHtml4ElementDescription, "save_end_tag?", save_end_tag_eh, 0); + rb_define_method(cNokogiriHtml4ElementDescription, "empty?", empty_eh, 0); + rb_define_method(cNokogiriHtml4ElementDescription, "deprecated?", deprecated_eh, 0); + rb_define_method(cNokogiriHtml4ElementDescription, "inline?", inline_eh, 0); + rb_define_method(cNokogiriHtml4ElementDescription, "description", description, 0); + rb_define_method(cNokogiriHtml4ElementDescription, "sub_elements", sub_elements, 0); + rb_define_method(cNokogiriHtml4ElementDescription, "default_sub_element", default_sub_element, 0); + rb_define_method(cNokogiriHtml4ElementDescription, "optional_attributes", optional_attributes, 0); + rb_define_method(cNokogiriHtml4ElementDescription, "deprecated_attributes", deprecated_attributes, 0); + rb_define_method(cNokogiriHtml4ElementDescription, "required_attributes", required_attributes, 0); +} diff --git a/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/html4_entity_lookup.c b/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/html4_entity_lookup.c new file mode 100644 index 000000000..85ad38425 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/html4_entity_lookup.c @@ -0,0 +1,37 @@ +#include + +static VALUE cNokogiriHtml4EntityLookup; + +/* + * call-seq: + * get(key) + * + * Get the HTML4::EntityDescription for +key+ + */ +static VALUE +get(VALUE _, VALUE rb_entity_name) +{ + VALUE cNokogiriHtml4EntityDescription; + const htmlEntityDesc *c_entity_desc; + VALUE 
rb_constructor_args[3]; + + c_entity_desc = htmlEntityLookup((const xmlChar *)StringValueCStr(rb_entity_name)); + if (NULL == c_entity_desc) { + return Qnil; + } + + rb_constructor_args[0] = UINT2NUM(c_entity_desc->value); + rb_constructor_args[1] = NOKOGIRI_STR_NEW2(c_entity_desc->name); + rb_constructor_args[2] = NOKOGIRI_STR_NEW2(c_entity_desc->desc); + + cNokogiriHtml4EntityDescription = rb_const_get_at(mNokogiriHtml4, rb_intern("EntityDescription")); + return rb_class_new_instance(3, rb_constructor_args, cNokogiriHtml4EntityDescription); +} + +void +noko_init_html_entity_lookup(void) +{ + cNokogiriHtml4EntityLookup = rb_define_class_under(mNokogiriHtml4, "EntityLookup", rb_cObject); + + rb_define_method(cNokogiriHtml4EntityLookup, "get", get, 1); +} diff --git a/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/html4_sax_parser.c b/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/html4_sax_parser.c new file mode 100644 index 000000000..2316ec2bd --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/html4_sax_parser.c @@ -0,0 +1,40 @@ +#include + +VALUE cNokogiriHtml4SaxParser; + +static ID id_start_document; + +static void +noko_html4_sax_parser_start_document(void *ctx) +{ + xmlParserCtxtPtr ctxt = (xmlParserCtxtPtr)ctx; + VALUE self = (VALUE)ctxt->_private; + VALUE doc = rb_iv_get(self, "@document"); + + xmlSAX2StartDocument(ctx); + + rb_funcall(doc, id_start_document, 0); +} + +static VALUE +noko_html4_sax_parser_initialize(VALUE self) +{ + xmlSAXHandlerPtr handler = noko_xml_sax_parser_unwrap(self); + + rb_call_super(0, NULL); + + handler->startDocument = noko_html4_sax_parser_start_document; + + return self; +} + +void +noko_init_html4_sax_parser(void) +{ + cNokogiriHtml4SaxParser = rb_define_class_under(mNokogiriHtml4Sax, "Parser", cNokogiriXmlSaxParser); + + rb_define_private_method(cNokogiriHtml4SaxParser, "initialize_native", + noko_html4_sax_parser_initialize, 0); + + id_start_document = rb_intern("start_document"); +} diff --git a/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/html4_sax_parser_context.c b/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/html4_sax_parser_context.c new file mode 100644 index 000000000..6f971d1fc --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/html4_sax_parser_context.c @@ -0,0 +1,98 @@ +#include + +VALUE cNokogiriHtml4SaxParserContext ; + +/* :nodoc: */ +static VALUE +noko_html4_sax_parser_context_s_native_memory(VALUE rb_class, VALUE rb_input, VALUE rb_encoding) +{ + Check_Type(rb_input, T_STRING); + if (!(int)RSTRING_LEN(rb_input)) { + rb_raise(rb_eRuntimeError, "input string cannot be empty"); + } + + if (!NIL_P(rb_encoding) && !rb_obj_is_kind_of(rb_encoding, rb_cEncoding)) { + rb_raise(rb_eTypeError, "argument must be an Encoding object"); + } + + htmlParserCtxtPtr c_context = + htmlCreateMemoryParserCtxt(StringValuePtr(rb_input), (int)RSTRING_LEN(rb_input)); + if (!c_context) { + rb_raise(rb_eRuntimeError, "failed to create xml sax parser context"); + } + + noko_xml_sax_parser_context_set_encoding(c_context, rb_encoding); + + if (c_context->sax) { + xmlFree(c_context->sax); + c_context->sax = NULL; + } + + return noko_xml_sax_parser_context_wrap(rb_class, c_context); +} + +/* :nodoc: */ +static VALUE +noko_html4_sax_parser_context_s_native_file(VALUE rb_class, VALUE rb_filename, VALUE rb_encoding) +{ + if (!NIL_P(rb_encoding) && 
!rb_obj_is_kind_of(rb_encoding, rb_cEncoding)) { + rb_raise(rb_eTypeError, "argument must be an Encoding object"); + } + + htmlParserCtxtPtr c_context = htmlCreateFileParserCtxt(StringValueCStr(rb_filename), NULL); + if (!c_context) { + rb_raise(rb_eRuntimeError, "failed to create xml sax parser context"); + } + + noko_xml_sax_parser_context_set_encoding(c_context, rb_encoding); + + if (c_context->sax) { + xmlFree(c_context->sax); + c_context->sax = NULL; + } + + return noko_xml_sax_parser_context_wrap(rb_class, c_context); +} + +static VALUE +noko_html4_sax_parser_context__parse_with(VALUE rb_context, VALUE rb_sax_parser) +{ + htmlParserCtxtPtr ctxt; + htmlSAXHandlerPtr sax; + + if (!rb_obj_is_kind_of(rb_sax_parser, cNokogiriXmlSaxParser)) { + rb_raise(rb_eArgError, "argument must be a Nokogiri::XML::SAX::Parser"); + } + + ctxt = noko_xml_sax_parser_context_unwrap(rb_context); + sax = noko_xml_sax_parser_unwrap(rb_sax_parser); + + ctxt->sax = sax; + ctxt->userData = ctxt; /* so we can use libxml2/SAX2.c handlers if we want to */ + ctxt->_private = (void *)rb_sax_parser; + + xmlSetStructuredErrorFunc(NULL, NULL); + + /* although we're calling back into Ruby here, we don't need to worry about exceptions, because we + * don't have any cleanup to do. The only memory we need to free is handled by + * xml_sax_parser_context_type_free */ + htmlParseDocument(ctxt); + + return Qnil; +} + +void +noko_init_html_sax_parser_context(void) +{ + assert(cNokogiriXmlSaxParserContext); + cNokogiriHtml4SaxParserContext = rb_define_class_under(mNokogiriHtml4Sax, "ParserContext", + cNokogiriXmlSaxParserContext); + + rb_define_singleton_method(cNokogiriHtml4SaxParserContext, "native_memory", + noko_html4_sax_parser_context_s_native_memory, 2); + rb_define_singleton_method(cNokogiriHtml4SaxParserContext, "native_file", + noko_html4_sax_parser_context_s_native_file, 2); + + rb_define_method(cNokogiriHtml4SaxParserContext, "parse_with", + noko_html4_sax_parser_context__parse_with, 1); +} diff --git a/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/html4_sax_push_parser.c b/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/html4_sax_push_parser.c new file mode 100644 index 000000000..845baf0b5 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/html4_sax_push_parser.c @@ -0,0 +1,96 @@ +#include + +VALUE cNokogiriHtml4SaxPushParser; + +/* + * Write +chunk+ to PushParser. +last_chunk+ triggers the end_document handle + */ +static VALUE +noko_html4_sax_push_parser__native_write(VALUE self, VALUE rb_chunk, VALUE rb_last_chunk) +{ + xmlParserCtxtPtr ctx; + const char *chunk = NULL; + int size = 0; + int status = 0; + libxmlStructuredErrorHandlerState handler_state; + + ctx = noko_xml_sax_push_parser_unwrap(self); + + if (Qnil != rb_chunk) { + chunk = StringValuePtr(rb_chunk); + size = (int)RSTRING_LEN(rb_chunk); + } + + noko__structured_error_func_save_and_set(&handler_state, NULL, NULL); + + status = htmlParseChunk(ctx, chunk, size, Qtrue == rb_last_chunk ? 
1 : 0); + + noko__structured_error_func_restore(&handler_state); + + if ((status != 0) && !(xmlCtxtGetOptions(ctx) & XML_PARSE_RECOVER)) { + // TODO: there appear to be no tests for this block + xmlErrorConstPtr e = xmlCtxtGetLastError(ctx); + noko__error_raise(NULL, e); + } + + return self; +} + +/* + * Initialize the push parser with +xml_sax+ using +filename+ + */ +static VALUE +noko_html4_sax_push_parser__initialize_native( + VALUE self, + VALUE rb_xml_sax, + VALUE rb_filename, + VALUE encoding +) +{ + htmlSAXHandlerPtr sax; + const char *filename = NULL; + htmlParserCtxtPtr ctx; + xmlCharEncoding enc = XML_CHAR_ENCODING_NONE; + + sax = noko_xml_sax_parser_unwrap(rb_xml_sax); + + if (rb_filename != Qnil) { filename = StringValueCStr(rb_filename); } + + if (!NIL_P(encoding)) { + enc = xmlParseCharEncoding(StringValueCStr(encoding)); + if (enc == XML_CHAR_ENCODING_ERROR) { + rb_raise(rb_eArgError, "Unsupported Encoding"); + } + } + + ctx = htmlCreatePushParserCtxt( + sax, + NULL, + NULL, + 0, + filename, + enc + ); + if (ctx == NULL) { + rb_raise(rb_eRuntimeError, "Could not create a parser context"); + } + + ctx->userData = ctx; + ctx->_private = (void *)rb_xml_sax; + + DATA_PTR(self) = ctx; + return self; +} + +void +noko_init_html_sax_push_parser(void) +{ + assert(cNokogiriXmlSaxPushParser); + cNokogiriHtml4SaxPushParser = + rb_define_class_under(mNokogiriHtml4Sax, "PushParser", cNokogiriXmlSaxPushParser); + + rb_define_private_method(cNokogiriHtml4SaxPushParser, "initialize_native", + noko_html4_sax_push_parser__initialize_native, 3); + rb_define_private_method(cNokogiriHtml4SaxPushParser, "native_write", + noko_html4_sax_push_parser__native_write, 2); +} diff --git a/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/include/libexslt/exslt.h b/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/include/libexslt/exslt.h new file mode 100644 index 000000000..dfbd09bef --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/include/libexslt/exslt.h @@ -0,0 +1,108 @@ +/* + * Summary: main header file + * + * Copy: See Copyright for the status of this software. 
+ */ + + +#ifndef __EXSLT_H__ +#define __EXSLT_H__ + +#include +#include +#include "exsltexports.h" +#include + +#ifdef __cplusplus +extern "C" { +#endif + +EXSLTPUBVAR const char *exsltLibraryVersion; +EXSLTPUBVAR const int exsltLibexsltVersion; +EXSLTPUBVAR const int exsltLibxsltVersion; +EXSLTPUBVAR const int exsltLibxmlVersion; + +/** + * EXSLT_COMMON_NAMESPACE: + * + * Namespace for EXSLT common functions + */ +#define EXSLT_COMMON_NAMESPACE ((const xmlChar *) "http://exslt.org/common") +/** + * EXSLT_CRYPTO_NAMESPACE: + * + * Namespace for EXSLT crypto functions + */ +#define EXSLT_CRYPTO_NAMESPACE ((const xmlChar *) "http://exslt.org/crypto") +/** + * EXSLT_MATH_NAMESPACE: + * + * Namespace for EXSLT math functions + */ +#define EXSLT_MATH_NAMESPACE ((const xmlChar *) "http://exslt.org/math") +/** + * EXSLT_SETS_NAMESPACE: + * + * Namespace for EXSLT set functions + */ +#define EXSLT_SETS_NAMESPACE ((const xmlChar *) "http://exslt.org/sets") +/** + * EXSLT_FUNCTIONS_NAMESPACE: + * + * Namespace for EXSLT functions extension functions + */ +#define EXSLT_FUNCTIONS_NAMESPACE ((const xmlChar *) "http://exslt.org/functions") +/** + * EXSLT_STRINGS_NAMESPACE: + * + * Namespace for EXSLT strings functions + */ +#define EXSLT_STRINGS_NAMESPACE ((const xmlChar *) "http://exslt.org/strings") +/** + * EXSLT_DATE_NAMESPACE: + * + * Namespace for EXSLT date functions + */ +#define EXSLT_DATE_NAMESPACE ((const xmlChar *) "http://exslt.org/dates-and-times") +/** + * EXSLT_DYNAMIC_NAMESPACE: + * + * Namespace for EXSLT dynamic functions + */ +#define EXSLT_DYNAMIC_NAMESPACE ((const xmlChar *) "http://exslt.org/dynamic") + +/** + * SAXON_NAMESPACE: + * + * Namespace for SAXON extensions functions + */ +#define SAXON_NAMESPACE ((const xmlChar *) "http://icl.com/saxon") + +EXSLTPUBFUN void EXSLTCALL exsltCommonRegister (void); +#ifdef EXSLT_CRYPTO_ENABLED +EXSLTPUBFUN void EXSLTCALL exsltCryptoRegister (void); +#endif +EXSLTPUBFUN void EXSLTCALL exsltMathRegister (void); +EXSLTPUBFUN void EXSLTCALL exsltSetsRegister (void); +EXSLTPUBFUN void EXSLTCALL exsltFuncRegister (void); +EXSLTPUBFUN void EXSLTCALL exsltStrRegister (void); +EXSLTPUBFUN void EXSLTCALL exsltDateRegister (void); +EXSLTPUBFUN void EXSLTCALL exsltSaxonRegister (void); +EXSLTPUBFUN void EXSLTCALL exsltDynRegister(void); + +EXSLTPUBFUN void EXSLTCALL exsltRegisterAll (void); + +EXSLTPUBFUN int EXSLTCALL exsltDateXpathCtxtRegister (xmlXPathContextPtr ctxt, + const xmlChar *prefix); +EXSLTPUBFUN int EXSLTCALL exsltMathXpathCtxtRegister (xmlXPathContextPtr ctxt, + const xmlChar *prefix); +EXSLTPUBFUN int EXSLTCALL exsltSetsXpathCtxtRegister (xmlXPathContextPtr ctxt, + const xmlChar *prefix); +EXSLTPUBFUN int EXSLTCALL exsltStrXpathCtxtRegister (xmlXPathContextPtr ctxt, + const xmlChar *prefix); + +#ifdef __cplusplus +} +#endif +#endif /* __EXSLT_H__ */ + diff --git a/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/include/libexslt/exsltconfig.h b/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/include/libexslt/exsltconfig.h new file mode 100644 index 000000000..10e43b0f5 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/include/libexslt/exsltconfig.h @@ -0,0 +1,70 @@ +/* + * exsltconfig.h: compile-time version information for the EXSLT library + * + * See Copyright for the status of this software. 
+ * + * daniel@veillard.com + */ + +#ifndef __XML_EXSLTCONFIG_H__ +#define __XML_EXSLTCONFIG_H__ + +#ifdef __cplusplus +extern "C" { +#endif + +/** + * LIBEXSLT_DOTTED_VERSION: + * + * the version string like "1.2.3" + */ +#define LIBEXSLT_DOTTED_VERSION "0.8.24" + +/** + * LIBEXSLT_VERSION: + * + * the version number: 1.2.3 value is 10203 + */ +#define LIBEXSLT_VERSION 824 + +/** + * LIBEXSLT_VERSION_STRING: + * + * the version number string, 1.2.3 value is "10203" + */ +#define LIBEXSLT_VERSION_STRING "824" + +/** + * LIBEXSLT_VERSION_EXTRA: + * + * extra version information, used to show a Git commit description + */ +#define LIBEXSLT_VERSION_EXTRA "" + +/** + * WITH_CRYPTO: + * + * Whether crypto support is configured into exslt + */ +#if 0 +#define EXSLT_CRYPTO_ENABLED +#endif + +/** + * ATTRIBUTE_UNUSED: + * + * This macro is used to flag unused function parameters to GCC + */ +#ifdef __GNUC__ +#ifndef ATTRIBUTE_UNUSED +#define ATTRIBUTE_UNUSED __attribute__((unused)) +#endif +#else +#define ATTRIBUTE_UNUSED +#endif + +#ifdef __cplusplus +} +#endif + +#endif /* __XML_EXSLTCONFIG_H__ */ diff --git a/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/include/libexslt/exsltexports.h b/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/include/libexslt/exsltexports.h new file mode 100644 index 000000000..ee79ec7ae --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/include/libexslt/exsltexports.h @@ -0,0 +1,63 @@ +/* + * Summary: macros for marking symbols as exportable/importable. + * + * Copy: See Copyright for the status of this software. + */ + +#ifndef __EXSLT_EXPORTS_H__ +#define __EXSLT_EXPORTS_H__ + +#if defined(_WIN32) || defined(__CYGWIN__) +/** DOC_DISABLE */ + +#ifdef LIBEXSLT_STATIC + #define EXSLTPUBLIC +#elif defined(IN_LIBEXSLT) + #define EXSLTPUBLIC __declspec(dllexport) +#else + #define EXSLTPUBLIC __declspec(dllimport) +#endif + +#define EXSLTCALL __cdecl + +/** DOC_ENABLE */ +#else /* not Windows */ + +/** + * EXSLTPUBLIC: + * + * Macro which declares a public symbol + */ +#define EXSLTPUBLIC + +/** + * EXSLTCALL: + * + * Macro which declares the calling convention for exported functions + */ +#define EXSLTCALL + +#endif /* platform switch */ + +/* + * EXSLTPUBFUN: + * + * Macro which declares an exportable function + */ +#define EXSLTPUBFUN EXSLTPUBLIC + +/** + * EXSLTPUBVAR: + * + * Macro which declares an exportable variable + */ +#define EXSLTPUBVAR EXSLTPUBLIC extern + +/* Compatibility */ +#if !defined(LIBEXSLT_PUBLIC) +#define LIBEXSLT_PUBLIC EXSLTPUBVAR +#endif + +#endif /* __EXSLT_EXPORTS_H__ */ + + diff --git a/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/include/libxml2/libxml/HTMLparser.h b/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/include/libxml2/libxml/HTMLparser.h new file mode 100644 index 000000000..7be3d2b8a --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/include/libxml2/libxml/HTMLparser.h @@ -0,0 +1,336 @@ +/* + * Summary: interface for an HTML 4.0 non-verifying parser + * Description: this module implements an HTML 4.0 non-verifying parser + * with API compatible with the XML parser ones. It should + * be able to parse "real world" HTML, even if severely + * broken from a specification point of view. + * + * Copy: See Copyright for the status of this software. 
+ * + * Author: Daniel Veillard + */ + +#ifndef __HTML_PARSER_H__ +#define __HTML_PARSER_H__ +#include +#include + +#ifdef LIBXML_HTML_ENABLED + +#ifdef __cplusplus +extern "C" { +#endif + +/* + * Most of the back-end structures from XML and HTML are shared. + */ +typedef xmlParserCtxt htmlParserCtxt; +typedef xmlParserCtxtPtr htmlParserCtxtPtr; +typedef xmlParserNodeInfo htmlParserNodeInfo; +typedef xmlSAXHandler htmlSAXHandler; +typedef xmlSAXHandlerPtr htmlSAXHandlerPtr; +typedef xmlParserInput htmlParserInput; +typedef xmlParserInputPtr htmlParserInputPtr; +typedef xmlDocPtr htmlDocPtr; +typedef xmlNodePtr htmlNodePtr; + +/* + * Internal description of an HTML element, representing HTML 4.01 + * and XHTML 1.0 (which share the same structure). + */ +typedef struct _htmlElemDesc htmlElemDesc; +typedef htmlElemDesc *htmlElemDescPtr; +struct _htmlElemDesc { + const char *name; /* The tag name */ + char startTag; /* Whether the start tag can be implied */ + char endTag; /* Whether the end tag can be implied */ + char saveEndTag; /* Whether the end tag should be saved */ + char empty; /* Is this an empty element ? */ + char depr; /* Is this a deprecated element ? */ + char dtd; /* 1: only in Loose DTD, 2: only Frameset one */ + char isinline; /* is this a block 0 or inline 1 element */ + const char *desc; /* the description */ + +/* NRK Jan.2003 + * New fields encapsulating HTML structure + * + * Bugs: + * This is a very limited representation. It fails to tell us when + * an element *requires* subelements (we only have whether they're + * allowed or not), and it doesn't tell us where CDATA and PCDATA + * are allowed. Some element relationships are not fully represented: + * these are flagged with the word MODIFIER + */ + const char** subelts; /* allowed sub-elements of this element */ + const char* defaultsubelt; /* subelement for suggested auto-repair + if necessary or NULL */ + const char** attrs_opt; /* Optional Attributes */ + const char** attrs_depr; /* Additional deprecated attributes */ + const char** attrs_req; /* Required attributes */ +}; + +/* + * Internal description of an HTML entity. + */ +typedef struct _htmlEntityDesc htmlEntityDesc; +typedef htmlEntityDesc *htmlEntityDescPtr; +struct _htmlEntityDesc { + unsigned int value; /* the UNICODE value for the character */ + const char *name; /* The entity name */ + const char *desc; /* the description */ +}; + +#ifdef LIBXML_SAX1_ENABLED + +XML_DEPRECATED +XMLPUBVAR const xmlSAXHandlerV1 htmlDefaultSAXHandler; + +#ifdef LIBXML_THREAD_ENABLED +XML_DEPRECATED +XMLPUBFUN const xmlSAXHandlerV1 *__htmlDefaultSAXHandler(void); +#endif + +#endif /* LIBXML_SAX1_ENABLED */ + +/* + * There is only few public functions. 
+ */ +XML_DEPRECATED +XMLPUBFUN void + htmlInitAutoClose (void); +XMLPUBFUN const htmlElemDesc * + htmlTagLookup (const xmlChar *tag); +XMLPUBFUN const htmlEntityDesc * + htmlEntityLookup(const xmlChar *name); +XMLPUBFUN const htmlEntityDesc * + htmlEntityValueLookup(unsigned int value); + +XMLPUBFUN int + htmlIsAutoClosed(htmlDocPtr doc, + htmlNodePtr elem); +XMLPUBFUN int + htmlAutoCloseTag(htmlDocPtr doc, + const xmlChar *name, + htmlNodePtr elem); +XML_DEPRECATED +XMLPUBFUN const htmlEntityDesc * + htmlParseEntityRef(htmlParserCtxtPtr ctxt, + const xmlChar **str); +XML_DEPRECATED +XMLPUBFUN int + htmlParseCharRef(htmlParserCtxtPtr ctxt); +XML_DEPRECATED +XMLPUBFUN void + htmlParseElement(htmlParserCtxtPtr ctxt); + +XMLPUBFUN htmlParserCtxtPtr + htmlNewParserCtxt(void); +XMLPUBFUN htmlParserCtxtPtr + htmlNewSAXParserCtxt(const htmlSAXHandler *sax, + void *userData); + +XMLPUBFUN htmlParserCtxtPtr + htmlCreateMemoryParserCtxt(const char *buffer, + int size); + +XMLPUBFUN int + htmlParseDocument(htmlParserCtxtPtr ctxt); +XML_DEPRECATED +XMLPUBFUN htmlDocPtr + htmlSAXParseDoc (const xmlChar *cur, + const char *encoding, + htmlSAXHandlerPtr sax, + void *userData); +XMLPUBFUN htmlDocPtr + htmlParseDoc (const xmlChar *cur, + const char *encoding); +XMLPUBFUN htmlParserCtxtPtr + htmlCreateFileParserCtxt(const char *filename, + const char *encoding); +XML_DEPRECATED +XMLPUBFUN htmlDocPtr + htmlSAXParseFile(const char *filename, + const char *encoding, + htmlSAXHandlerPtr sax, + void *userData); +XMLPUBFUN htmlDocPtr + htmlParseFile (const char *filename, + const char *encoding); +XMLPUBFUN int + UTF8ToHtml (unsigned char *out, + int *outlen, + const unsigned char *in, + int *inlen); +XMLPUBFUN int + htmlEncodeEntities(unsigned char *out, + int *outlen, + const unsigned char *in, + int *inlen, int quoteChar); +XMLPUBFUN int + htmlIsScriptAttribute(const xmlChar *name); +XML_DEPRECATED +XMLPUBFUN int + htmlHandleOmittedElem(int val); + +#ifdef LIBXML_PUSH_ENABLED +/** + * Interfaces for the Push mode. + */ +XMLPUBFUN htmlParserCtxtPtr + htmlCreatePushParserCtxt(htmlSAXHandlerPtr sax, + void *user_data, + const char *chunk, + int size, + const char *filename, + xmlCharEncoding enc); +XMLPUBFUN int + htmlParseChunk (htmlParserCtxtPtr ctxt, + const char *chunk, + int size, + int terminate); +#endif /* LIBXML_PUSH_ENABLED */ + +XMLPUBFUN void + htmlFreeParserCtxt (htmlParserCtxtPtr ctxt); + +/* + * New set of simpler/more flexible APIs + */ +/** + * xmlParserOption: + * + * This is the set of XML parser options that can be passed down + * to the xmlReadDoc() and similar calls. + */ +typedef enum { + HTML_PARSE_RECOVER = 1<<0, /* Relaxed parsing */ + HTML_PARSE_NODEFDTD = 1<<2, /* do not default a doctype if not found */ + HTML_PARSE_NOERROR = 1<<5, /* suppress error reports */ + HTML_PARSE_NOWARNING= 1<<6, /* suppress warning reports */ + HTML_PARSE_PEDANTIC = 1<<7, /* pedantic error reporting */ + HTML_PARSE_NOBLANKS = 1<<8, /* remove blank nodes */ + HTML_PARSE_NONET = 1<<11,/* Forbid network access */ + HTML_PARSE_NOIMPLIED= 1<<13,/* Do not add implied html/body... 
elements */ + HTML_PARSE_COMPACT = 1<<16,/* compact small text nodes */ + HTML_PARSE_IGNORE_ENC=1<<21 /* ignore internal document encoding hint */ +} htmlParserOption; + +XMLPUBFUN void + htmlCtxtReset (htmlParserCtxtPtr ctxt); +XMLPUBFUN int + htmlCtxtUseOptions (htmlParserCtxtPtr ctxt, + int options); +XMLPUBFUN htmlDocPtr + htmlReadDoc (const xmlChar *cur, + const char *URL, + const char *encoding, + int options); +XMLPUBFUN htmlDocPtr + htmlReadFile (const char *URL, + const char *encoding, + int options); +XMLPUBFUN htmlDocPtr + htmlReadMemory (const char *buffer, + int size, + const char *URL, + const char *encoding, + int options); +XMLPUBFUN htmlDocPtr + htmlReadFd (int fd, + const char *URL, + const char *encoding, + int options); +XMLPUBFUN htmlDocPtr + htmlReadIO (xmlInputReadCallback ioread, + xmlInputCloseCallback ioclose, + void *ioctx, + const char *URL, + const char *encoding, + int options); +XMLPUBFUN htmlDocPtr + htmlCtxtParseDocument (htmlParserCtxtPtr ctxt, + xmlParserInputPtr input); +XMLPUBFUN htmlDocPtr + htmlCtxtReadDoc (xmlParserCtxtPtr ctxt, + const xmlChar *cur, + const char *URL, + const char *encoding, + int options); +XMLPUBFUN htmlDocPtr + htmlCtxtReadFile (xmlParserCtxtPtr ctxt, + const char *filename, + const char *encoding, + int options); +XMLPUBFUN htmlDocPtr + htmlCtxtReadMemory (xmlParserCtxtPtr ctxt, + const char *buffer, + int size, + const char *URL, + const char *encoding, + int options); +XMLPUBFUN htmlDocPtr + htmlCtxtReadFd (xmlParserCtxtPtr ctxt, + int fd, + const char *URL, + const char *encoding, + int options); +XMLPUBFUN htmlDocPtr + htmlCtxtReadIO (xmlParserCtxtPtr ctxt, + xmlInputReadCallback ioread, + xmlInputCloseCallback ioclose, + void *ioctx, + const char *URL, + const char *encoding, + int options); + +/* NRK/Jan2003: further knowledge of HTML structure + */ +typedef enum { + HTML_NA = 0 , /* something we don't check at all */ + HTML_INVALID = 0x1 , + HTML_DEPRECATED = 0x2 , + HTML_VALID = 0x4 , + HTML_REQUIRED = 0xc /* VALID bit set so ( & HTML_VALID ) is TRUE */ +} htmlStatus ; + +/* Using htmlElemDesc rather than name here, to emphasise the fact + that otherwise there's a lookup overhead +*/ +XMLPUBFUN htmlStatus htmlAttrAllowed(const htmlElemDesc*, const xmlChar*, int) ; +XMLPUBFUN int htmlElementAllowedHere(const htmlElemDesc*, const xmlChar*) ; +XMLPUBFUN htmlStatus htmlElementStatusHere(const htmlElemDesc*, const htmlElemDesc*) ; +XMLPUBFUN htmlStatus htmlNodeStatus(htmlNodePtr, int) ; +/** + * htmlDefaultSubelement: + * @elt: HTML element + * + * Returns the default subelement for this element + */ +#define htmlDefaultSubelement(elt) elt->defaultsubelt +/** + * htmlElementAllowedHereDesc: + * @parent: HTML parent element + * @elt: HTML element + * + * Checks whether an HTML element description may be a + * direct child of the specified element. + * + * Returns 1 if allowed; 0 otherwise. + */ +#define htmlElementAllowedHereDesc(parent,elt) \ + htmlElementAllowedHere((parent), (elt)->name) +/** + * htmlRequiredAttrs: + * @elt: HTML element + * + * Returns the attributes required for the specified element. 
+ */ +#define htmlRequiredAttrs(elt) (elt)->attrs_req + + +#ifdef __cplusplus +} +#endif + +#endif /* LIBXML_HTML_ENABLED */ +#endif /* __HTML_PARSER_H__ */ diff --git a/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/include/libxml2/libxml/HTMLtree.h b/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/include/libxml2/libxml/HTMLtree.h new file mode 100644 index 000000000..8e1ba90e9 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/include/libxml2/libxml/HTMLtree.h @@ -0,0 +1,147 @@ +/* + * Summary: specific APIs to process HTML tree, especially serialization + * Description: this module implements a few function needed to process + * tree in an HTML specific way. + * + * Copy: See Copyright for the status of this software. + * + * Author: Daniel Veillard + */ + +#ifndef __HTML_TREE_H__ +#define __HTML_TREE_H__ + +#include +#include +#include +#include + +#ifdef LIBXML_HTML_ENABLED + +#ifdef __cplusplus +extern "C" { +#endif + + +/** + * HTML_TEXT_NODE: + * + * Macro. A text node in a HTML document is really implemented + * the same way as a text node in an XML document. + */ +#define HTML_TEXT_NODE XML_TEXT_NODE +/** + * HTML_ENTITY_REF_NODE: + * + * Macro. An entity reference in a HTML document is really implemented + * the same way as an entity reference in an XML document. + */ +#define HTML_ENTITY_REF_NODE XML_ENTITY_REF_NODE +/** + * HTML_COMMENT_NODE: + * + * Macro. A comment in a HTML document is really implemented + * the same way as a comment in an XML document. + */ +#define HTML_COMMENT_NODE XML_COMMENT_NODE +/** + * HTML_PRESERVE_NODE: + * + * Macro. A preserved node in a HTML document is really implemented + * the same way as a CDATA section in an XML document. + */ +#define HTML_PRESERVE_NODE XML_CDATA_SECTION_NODE +/** + * HTML_PI_NODE: + * + * Macro. A processing instruction in a HTML document is really implemented + * the same way as a processing instruction in an XML document. 
+ */ +#define HTML_PI_NODE XML_PI_NODE + +XMLPUBFUN htmlDocPtr + htmlNewDoc (const xmlChar *URI, + const xmlChar *ExternalID); +XMLPUBFUN htmlDocPtr + htmlNewDocNoDtD (const xmlChar *URI, + const xmlChar *ExternalID); +XMLPUBFUN const xmlChar * + htmlGetMetaEncoding (htmlDocPtr doc); +XMLPUBFUN int + htmlSetMetaEncoding (htmlDocPtr doc, + const xmlChar *encoding); +#ifdef LIBXML_OUTPUT_ENABLED +XMLPUBFUN void + htmlDocDumpMemory (xmlDocPtr cur, + xmlChar **mem, + int *size); +XMLPUBFUN void + htmlDocDumpMemoryFormat (xmlDocPtr cur, + xmlChar **mem, + int *size, + int format); +XMLPUBFUN int + htmlDocDump (FILE *f, + xmlDocPtr cur); +XMLPUBFUN int + htmlSaveFile (const char *filename, + xmlDocPtr cur); +XMLPUBFUN int + htmlNodeDump (xmlBufferPtr buf, + xmlDocPtr doc, + xmlNodePtr cur); +XMLPUBFUN void + htmlNodeDumpFile (FILE *out, + xmlDocPtr doc, + xmlNodePtr cur); +XMLPUBFUN int + htmlNodeDumpFileFormat (FILE *out, + xmlDocPtr doc, + xmlNodePtr cur, + const char *encoding, + int format); +XMLPUBFUN int + htmlSaveFileEnc (const char *filename, + xmlDocPtr cur, + const char *encoding); +XMLPUBFUN int + htmlSaveFileFormat (const char *filename, + xmlDocPtr cur, + const char *encoding, + int format); + +XMLPUBFUN void + htmlNodeDumpFormatOutput(xmlOutputBufferPtr buf, + xmlDocPtr doc, + xmlNodePtr cur, + const char *encoding, + int format); +XMLPUBFUN void + htmlDocContentDumpOutput(xmlOutputBufferPtr buf, + xmlDocPtr cur, + const char *encoding); +XMLPUBFUN void + htmlDocContentDumpFormatOutput(xmlOutputBufferPtr buf, + xmlDocPtr cur, + const char *encoding, + int format); +XMLPUBFUN void + htmlNodeDumpOutput (xmlOutputBufferPtr buf, + xmlDocPtr doc, + xmlNodePtr cur, + const char *encoding); + +#endif /* LIBXML_OUTPUT_ENABLED */ + +XMLPUBFUN int + htmlIsBooleanAttr (const xmlChar *name); + + +#ifdef __cplusplus +} +#endif + +#endif /* LIBXML_HTML_ENABLED */ + +#endif /* __HTML_TREE_H__ */ + diff --git a/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/include/libxml2/libxml/SAX.h b/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/include/libxml2/libxml/SAX.h new file mode 100644 index 000000000..eea1057bf --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/include/libxml2/libxml/SAX.h @@ -0,0 +1,202 @@ +/* + * Summary: Old SAX version 1 handler, deprecated + * Description: DEPRECATED set of SAX version 1 interfaces used to + * build the DOM tree. + * + * Copy: See Copyright for the status of this software. 
+ * + * Author: Daniel Veillard + */ + + +#ifndef __XML_SAX_H__ +#define __XML_SAX_H__ + +#include +#include + +#ifdef LIBXML_LEGACY_ENABLED + +#ifdef __cplusplus +extern "C" { +#endif +XML_DEPRECATED +XMLPUBFUN const xmlChar * + getPublicId (void *ctx); +XML_DEPRECATED +XMLPUBFUN const xmlChar * + getSystemId (void *ctx); +XML_DEPRECATED +XMLPUBFUN void + setDocumentLocator (void *ctx, + xmlSAXLocatorPtr loc); + +XML_DEPRECATED +XMLPUBFUN int + getLineNumber (void *ctx); +XML_DEPRECATED +XMLPUBFUN int + getColumnNumber (void *ctx); + +XML_DEPRECATED +XMLPUBFUN int + isStandalone (void *ctx); +XML_DEPRECATED +XMLPUBFUN int + hasInternalSubset (void *ctx); +XML_DEPRECATED +XMLPUBFUN int + hasExternalSubset (void *ctx); + +XML_DEPRECATED +XMLPUBFUN void + internalSubset (void *ctx, + const xmlChar *name, + const xmlChar *ExternalID, + const xmlChar *SystemID); +XML_DEPRECATED +XMLPUBFUN void + externalSubset (void *ctx, + const xmlChar *name, + const xmlChar *ExternalID, + const xmlChar *SystemID); +XML_DEPRECATED +XMLPUBFUN xmlEntityPtr + getEntity (void *ctx, + const xmlChar *name); +XML_DEPRECATED +XMLPUBFUN xmlEntityPtr + getParameterEntity (void *ctx, + const xmlChar *name); +XML_DEPRECATED +XMLPUBFUN xmlParserInputPtr + resolveEntity (void *ctx, + const xmlChar *publicId, + const xmlChar *systemId); + +XML_DEPRECATED +XMLPUBFUN void + entityDecl (void *ctx, + const xmlChar *name, + int type, + const xmlChar *publicId, + const xmlChar *systemId, + xmlChar *content); +XML_DEPRECATED +XMLPUBFUN void + attributeDecl (void *ctx, + const xmlChar *elem, + const xmlChar *fullname, + int type, + int def, + const xmlChar *defaultValue, + xmlEnumerationPtr tree); +XML_DEPRECATED +XMLPUBFUN void + elementDecl (void *ctx, + const xmlChar *name, + int type, + xmlElementContentPtr content); +XML_DEPRECATED +XMLPUBFUN void + notationDecl (void *ctx, + const xmlChar *name, + const xmlChar *publicId, + const xmlChar *systemId); +XML_DEPRECATED +XMLPUBFUN void + unparsedEntityDecl (void *ctx, + const xmlChar *name, + const xmlChar *publicId, + const xmlChar *systemId, + const xmlChar *notationName); + +XML_DEPRECATED +XMLPUBFUN void + startDocument (void *ctx); +XML_DEPRECATED +XMLPUBFUN void + endDocument (void *ctx); +XML_DEPRECATED +XMLPUBFUN void + attribute (void *ctx, + const xmlChar *fullname, + const xmlChar *value); +XML_DEPRECATED +XMLPUBFUN void + startElement (void *ctx, + const xmlChar *fullname, + const xmlChar **atts); +XML_DEPRECATED +XMLPUBFUN void + endElement (void *ctx, + const xmlChar *name); +XML_DEPRECATED +XMLPUBFUN void + reference (void *ctx, + const xmlChar *name); +XML_DEPRECATED +XMLPUBFUN void + characters (void *ctx, + const xmlChar *ch, + int len); +XML_DEPRECATED +XMLPUBFUN void + ignorableWhitespace (void *ctx, + const xmlChar *ch, + int len); +XML_DEPRECATED +XMLPUBFUN void + processingInstruction (void *ctx, + const xmlChar *target, + const xmlChar *data); +XML_DEPRECATED +XMLPUBFUN void + globalNamespace (void *ctx, + const xmlChar *href, + const xmlChar *prefix); +XML_DEPRECATED +XMLPUBFUN void + setNamespace (void *ctx, + const xmlChar *name); +XML_DEPRECATED +XMLPUBFUN xmlNsPtr + getNamespace (void *ctx); +XML_DEPRECATED +XMLPUBFUN int + checkNamespace (void *ctx, + xmlChar *nameSpace); +XML_DEPRECATED +XMLPUBFUN void + namespaceDecl (void *ctx, + const xmlChar *href, + const xmlChar *prefix); +XML_DEPRECATED +XMLPUBFUN void + comment (void *ctx, + const xmlChar *value); +XML_DEPRECATED +XMLPUBFUN void + cdataBlock (void *ctx, + const xmlChar *value, + int len); + 
+#ifdef LIBXML_SAX1_ENABLED +XML_DEPRECATED +XMLPUBFUN void + initxmlDefaultSAXHandler (xmlSAXHandlerV1 *hdlr, + int warning); +#ifdef LIBXML_HTML_ENABLED +XML_DEPRECATED +XMLPUBFUN void + inithtmlDefaultSAXHandler (xmlSAXHandlerV1 *hdlr); +#endif +#endif /* LIBXML_SAX1_ENABLED */ + +#ifdef __cplusplus +} +#endif + +#endif /* LIBXML_LEGACY_ENABLED */ + +#endif /* __XML_SAX_H__ */ diff --git a/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/include/libxml2/libxml/SAX2.h b/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/include/libxml2/libxml/SAX2.h new file mode 100644 index 000000000..4c4ecce8e --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/include/libxml2/libxml/SAX2.h @@ -0,0 +1,171 @@ +/* + * Summary: SAX2 parser interface used to build the DOM tree + * Description: those are the default SAX2 interfaces used by + * the library when building DOM tree. + * + * Copy: See Copyright for the status of this software. + * + * Author: Daniel Veillard + */ + + +#ifndef __XML_SAX2_H__ +#define __XML_SAX2_H__ + +#include +#include + +#ifdef __cplusplus +extern "C" { +#endif +XMLPUBFUN const xmlChar * + xmlSAX2GetPublicId (void *ctx); +XMLPUBFUN const xmlChar * + xmlSAX2GetSystemId (void *ctx); +XMLPUBFUN void + xmlSAX2SetDocumentLocator (void *ctx, + xmlSAXLocatorPtr loc); + +XMLPUBFUN int + xmlSAX2GetLineNumber (void *ctx); +XMLPUBFUN int + xmlSAX2GetColumnNumber (void *ctx); + +XMLPUBFUN int + xmlSAX2IsStandalone (void *ctx); +XMLPUBFUN int + xmlSAX2HasInternalSubset (void *ctx); +XMLPUBFUN int + xmlSAX2HasExternalSubset (void *ctx); + +XMLPUBFUN void + xmlSAX2InternalSubset (void *ctx, + const xmlChar *name, + const xmlChar *ExternalID, + const xmlChar *SystemID); +XMLPUBFUN void + xmlSAX2ExternalSubset (void *ctx, + const xmlChar *name, + const xmlChar *ExternalID, + const xmlChar *SystemID); +XMLPUBFUN xmlEntityPtr + xmlSAX2GetEntity (void *ctx, + const xmlChar *name); +XMLPUBFUN xmlEntityPtr + xmlSAX2GetParameterEntity (void *ctx, + const xmlChar *name); +XMLPUBFUN xmlParserInputPtr + xmlSAX2ResolveEntity (void *ctx, + const xmlChar *publicId, + const xmlChar *systemId); + +XMLPUBFUN void + xmlSAX2EntityDecl (void *ctx, + const xmlChar *name, + int type, + const xmlChar *publicId, + const xmlChar *systemId, + xmlChar *content); +XMLPUBFUN void + xmlSAX2AttributeDecl (void *ctx, + const xmlChar *elem, + const xmlChar *fullname, + int type, + int def, + const xmlChar *defaultValue, + xmlEnumerationPtr tree); +XMLPUBFUN void + xmlSAX2ElementDecl (void *ctx, + const xmlChar *name, + int type, + xmlElementContentPtr content); +XMLPUBFUN void + xmlSAX2NotationDecl (void *ctx, + const xmlChar *name, + const xmlChar *publicId, + const xmlChar *systemId); +XMLPUBFUN void + xmlSAX2UnparsedEntityDecl (void *ctx, + const xmlChar *name, + const xmlChar *publicId, + const xmlChar *systemId, + const xmlChar *notationName); + +XMLPUBFUN void + xmlSAX2StartDocument (void *ctx); +XMLPUBFUN void + xmlSAX2EndDocument (void *ctx); +#if defined(LIBXML_SAX1_ENABLED) || defined(LIBXML_HTML_ENABLED) || \ + defined(LIBXML_WRITER_ENABLED) || defined(LIBXML_LEGACY_ENABLED) +XMLPUBFUN void + xmlSAX2StartElement (void *ctx, + const xmlChar *fullname, + const xmlChar **atts); +XMLPUBFUN void + xmlSAX2EndElement (void *ctx, + const xmlChar *name); +#endif /* LIBXML_SAX1_ENABLED or LIBXML_HTML_ENABLED or LIBXML_LEGACY_ENABLED */ +XMLPUBFUN void + xmlSAX2StartElementNs (void *ctx, + const xmlChar *localname, + const 
xmlChar *prefix, + const xmlChar *URI, + int nb_namespaces, + const xmlChar **namespaces, + int nb_attributes, + int nb_defaulted, + const xmlChar **attributes); +XMLPUBFUN void + xmlSAX2EndElementNs (void *ctx, + const xmlChar *localname, + const xmlChar *prefix, + const xmlChar *URI); +XMLPUBFUN void + xmlSAX2Reference (void *ctx, + const xmlChar *name); +XMLPUBFUN void + xmlSAX2Characters (void *ctx, + const xmlChar *ch, + int len); +XMLPUBFUN void + xmlSAX2IgnorableWhitespace (void *ctx, + const xmlChar *ch, + int len); +XMLPUBFUN void + xmlSAX2ProcessingInstruction (void *ctx, + const xmlChar *target, + const xmlChar *data); +XMLPUBFUN void + xmlSAX2Comment (void *ctx, + const xmlChar *value); +XMLPUBFUN void + xmlSAX2CDataBlock (void *ctx, + const xmlChar *value, + int len); + +#ifdef LIBXML_SAX1_ENABLED +XML_DEPRECATED +XMLPUBFUN int + xmlSAXDefaultVersion (int version); +#endif /* LIBXML_SAX1_ENABLED */ + +XMLPUBFUN int + xmlSAXVersion (xmlSAXHandler *hdlr, + int version); +XMLPUBFUN void + xmlSAX2InitDefaultSAXHandler (xmlSAXHandler *hdlr, + int warning); +#ifdef LIBXML_HTML_ENABLED +XMLPUBFUN void + xmlSAX2InitHtmlDefaultSAXHandler(xmlSAXHandler *hdlr); +XML_DEPRECATED +XMLPUBFUN void + htmlDefaultSAXHandlerInit (void); +#endif +XML_DEPRECATED +XMLPUBFUN void + xmlDefaultSAXHandlerInit (void); +#ifdef __cplusplus +} +#endif +#endif /* __XML_SAX2_H__ */ diff --git a/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/include/libxml2/libxml/c14n.h b/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/include/libxml2/libxml/c14n.h new file mode 100644 index 000000000..8ccd1cef6 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/include/libxml2/libxml/c14n.h @@ -0,0 +1,115 @@ +/* + * Summary: Provide Canonical XML and Exclusive XML Canonicalization + * Description: the c14n modules provides a + * + * "Canonical XML" implementation + * http://www.w3.org/TR/xml-c14n + * + * and an + * + * "Exclusive XML Canonicalization" implementation + * http://www.w3.org/TR/xml-exc-c14n + + * Copy: See Copyright for the status of this software. 
+ * + * Author: Aleksey Sanin + */ +#ifndef __XML_C14N_H__ +#define __XML_C14N_H__ + +#include + +#ifdef LIBXML_C14N_ENABLED + +#include +#include + +#ifdef __cplusplus +extern "C" { +#endif /* __cplusplus */ + +/* + * XML Canonicalization + * http://www.w3.org/TR/xml-c14n + * + * Exclusive XML Canonicalization + * http://www.w3.org/TR/xml-exc-c14n + * + * Canonical form of an XML document could be created if and only if + * a) default attributes (if any) are added to all nodes + * b) all character and parsed entity references are resolved + * In order to achieve this in libxml2 the document MUST be loaded with + * following options: XML_PARSE_DTDATTR | XML_PARSE_NOENT + */ + +/* + * xmlC14NMode: + * + * Predefined values for C14N modes + * + */ +typedef enum { + XML_C14N_1_0 = 0, /* Original C14N 1.0 spec */ + XML_C14N_EXCLUSIVE_1_0 = 1, /* Exclusive C14N 1.0 spec */ + XML_C14N_1_1 = 2 /* C14N 1.1 spec */ +} xmlC14NMode; + +XMLPUBFUN int + xmlC14NDocSaveTo (xmlDocPtr doc, + xmlNodeSetPtr nodes, + int mode, /* a xmlC14NMode */ + xmlChar **inclusive_ns_prefixes, + int with_comments, + xmlOutputBufferPtr buf); + +XMLPUBFUN int + xmlC14NDocDumpMemory (xmlDocPtr doc, + xmlNodeSetPtr nodes, + int mode, /* a xmlC14NMode */ + xmlChar **inclusive_ns_prefixes, + int with_comments, + xmlChar **doc_txt_ptr); + +XMLPUBFUN int + xmlC14NDocSave (xmlDocPtr doc, + xmlNodeSetPtr nodes, + int mode, /* a xmlC14NMode */ + xmlChar **inclusive_ns_prefixes, + int with_comments, + const char* filename, + int compression); + + +/** + * This is the core C14N function + */ +/** + * xmlC14NIsVisibleCallback: + * @user_data: user data + * @node: the current node + * @parent: the parent node + * + * Signature for a C14N callback on visible nodes + * + * Returns 1 if the node should be included + */ +typedef int (*xmlC14NIsVisibleCallback) (void* user_data, + xmlNodePtr node, + xmlNodePtr parent); + +XMLPUBFUN int + xmlC14NExecute (xmlDocPtr doc, + xmlC14NIsVisibleCallback is_visible_callback, + void* user_data, + int mode, /* a xmlC14NMode */ + xmlChar **inclusive_ns_prefixes, + int with_comments, + xmlOutputBufferPtr buf); + +#ifdef __cplusplus +} +#endif /* __cplusplus */ + +#endif /* LIBXML_C14N_ENABLED */ +#endif /* __XML_C14N_H__ */ + diff --git a/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/include/libxml2/libxml/catalog.h b/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/include/libxml2/libxml/catalog.h new file mode 100644 index 000000000..02fa7ab2a --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/include/libxml2/libxml/catalog.h @@ -0,0 +1,182 @@ +/** + * Summary: interfaces to the Catalog handling system + * Description: the catalog module implements the support for + * XML Catalogs and SGML catalogs + * + * SGML Open Technical Resolution TR9401:1997. + * http://www.jclark.com/sp/catalog.htm + * + * XML Catalogs Working Draft 06 August 2001 + * http://www.oasis-open.org/committees/entity/spec-2001-08-06.html + * + * Copy: See Copyright for the status of this software. + * + * Author: Daniel Veillard + */ + +#ifndef __XML_CATALOG_H__ +#define __XML_CATALOG_H__ + +#include + +#include +#include +#include + +#ifdef LIBXML_CATALOG_ENABLED + +#ifdef __cplusplus +extern "C" { +#endif + +/** + * XML_CATALOGS_NAMESPACE: + * + * The namespace for the XML Catalogs elements. 
+ */ +#define XML_CATALOGS_NAMESPACE \ + (const xmlChar *) "urn:oasis:names:tc:entity:xmlns:xml:catalog" +/** + * XML_CATALOG_PI: + * + * The specific XML Catalog Processing Instruction name. + */ +#define XML_CATALOG_PI \ + (const xmlChar *) "oasis-xml-catalog" + +/* + * The API is voluntarily limited to general cataloging. + */ +typedef enum { + XML_CATA_PREFER_NONE = 0, + XML_CATA_PREFER_PUBLIC = 1, + XML_CATA_PREFER_SYSTEM +} xmlCatalogPrefer; + +typedef enum { + XML_CATA_ALLOW_NONE = 0, + XML_CATA_ALLOW_GLOBAL = 1, + XML_CATA_ALLOW_DOCUMENT = 2, + XML_CATA_ALLOW_ALL = 3 +} xmlCatalogAllow; + +typedef struct _xmlCatalog xmlCatalog; +typedef xmlCatalog *xmlCatalogPtr; + +/* + * Operations on a given catalog. + */ +XMLPUBFUN xmlCatalogPtr + xmlNewCatalog (int sgml); +XMLPUBFUN xmlCatalogPtr + xmlLoadACatalog (const char *filename); +XMLPUBFUN xmlCatalogPtr + xmlLoadSGMLSuperCatalog (const char *filename); +XMLPUBFUN int + xmlConvertSGMLCatalog (xmlCatalogPtr catal); +XMLPUBFUN int + xmlACatalogAdd (xmlCatalogPtr catal, + const xmlChar *type, + const xmlChar *orig, + const xmlChar *replace); +XMLPUBFUN int + xmlACatalogRemove (xmlCatalogPtr catal, + const xmlChar *value); +XMLPUBFUN xmlChar * + xmlACatalogResolve (xmlCatalogPtr catal, + const xmlChar *pubID, + const xmlChar *sysID); +XMLPUBFUN xmlChar * + xmlACatalogResolveSystem(xmlCatalogPtr catal, + const xmlChar *sysID); +XMLPUBFUN xmlChar * + xmlACatalogResolvePublic(xmlCatalogPtr catal, + const xmlChar *pubID); +XMLPUBFUN xmlChar * + xmlACatalogResolveURI (xmlCatalogPtr catal, + const xmlChar *URI); +#ifdef LIBXML_OUTPUT_ENABLED +XMLPUBFUN void + xmlACatalogDump (xmlCatalogPtr catal, + FILE *out); +#endif /* LIBXML_OUTPUT_ENABLED */ +XMLPUBFUN void + xmlFreeCatalog (xmlCatalogPtr catal); +XMLPUBFUN int + xmlCatalogIsEmpty (xmlCatalogPtr catal); + +/* + * Global operations. + */ +XMLPUBFUN void + xmlInitializeCatalog (void); +XMLPUBFUN int + xmlLoadCatalog (const char *filename); +XMLPUBFUN void + xmlLoadCatalogs (const char *paths); +XMLPUBFUN void + xmlCatalogCleanup (void); +#ifdef LIBXML_OUTPUT_ENABLED +XMLPUBFUN void + xmlCatalogDump (FILE *out); +#endif /* LIBXML_OUTPUT_ENABLED */ +XMLPUBFUN xmlChar * + xmlCatalogResolve (const xmlChar *pubID, + const xmlChar *sysID); +XMLPUBFUN xmlChar * + xmlCatalogResolveSystem (const xmlChar *sysID); +XMLPUBFUN xmlChar * + xmlCatalogResolvePublic (const xmlChar *pubID); +XMLPUBFUN xmlChar * + xmlCatalogResolveURI (const xmlChar *URI); +XMLPUBFUN int + xmlCatalogAdd (const xmlChar *type, + const xmlChar *orig, + const xmlChar *replace); +XMLPUBFUN int + xmlCatalogRemove (const xmlChar *value); +XMLPUBFUN xmlDocPtr + xmlParseCatalogFile (const char *filename); +XMLPUBFUN int + xmlCatalogConvert (void); + +/* + * Strictly minimal interfaces for per-document catalogs used + * by the parser. + */ +XMLPUBFUN void + xmlCatalogFreeLocal (void *catalogs); +XMLPUBFUN void * + xmlCatalogAddLocal (void *catalogs, + const xmlChar *URL); +XMLPUBFUN xmlChar * + xmlCatalogLocalResolve (void *catalogs, + const xmlChar *pubID, + const xmlChar *sysID); +XMLPUBFUN xmlChar * + xmlCatalogLocalResolveURI(void *catalogs, + const xmlChar *URI); +/* + * Preference settings. 
+ */ +XMLPUBFUN int + xmlCatalogSetDebug (int level); +XMLPUBFUN xmlCatalogPrefer + xmlCatalogSetDefaultPrefer(xmlCatalogPrefer prefer); +XMLPUBFUN void + xmlCatalogSetDefaults (xmlCatalogAllow allow); +XMLPUBFUN xmlCatalogAllow + xmlCatalogGetDefaults (void); + + +/* DEPRECATED interfaces */ +XMLPUBFUN const xmlChar * + xmlCatalogGetSystem (const xmlChar *sysID); +XMLPUBFUN const xmlChar * + xmlCatalogGetPublic (const xmlChar *pubID); + +#ifdef __cplusplus +} +#endif +#endif /* LIBXML_CATALOG_ENABLED */ +#endif /* __XML_CATALOG_H__ */ diff --git a/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/include/libxml2/libxml/chvalid.h b/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/include/libxml2/libxml/chvalid.h new file mode 100644 index 000000000..8225c95ee --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/include/libxml2/libxml/chvalid.h @@ -0,0 +1,230 @@ +/* + * Summary: Unicode character range checking + * Description: this module exports interfaces for the character + * range validation APIs + * + * This file is automatically generated from the cvs source + * definition files using the genChRanges.py Python script + * + * Generation date: Mon Mar 27 11:09:48 2006 + * Sources: chvalid.def + * Author: William Brack + */ + +#ifndef __XML_CHVALID_H__ +#define __XML_CHVALID_H__ + +#include +#include + +#ifdef __cplusplus +extern "C" { +#endif + +/* + * Define our typedefs and structures + * + */ +typedef struct _xmlChSRange xmlChSRange; +typedef xmlChSRange *xmlChSRangePtr; +struct _xmlChSRange { + unsigned short low; + unsigned short high; +}; + +typedef struct _xmlChLRange xmlChLRange; +typedef xmlChLRange *xmlChLRangePtr; +struct _xmlChLRange { + unsigned int low; + unsigned int high; +}; + +typedef struct _xmlChRangeGroup xmlChRangeGroup; +typedef xmlChRangeGroup *xmlChRangeGroupPtr; +struct _xmlChRangeGroup { + int nbShortRange; + int nbLongRange; + const xmlChSRange *shortRange; /* points to an array of ranges */ + const xmlChLRange *longRange; +}; + +/** + * Range checking routine + */ +XMLPUBFUN int + xmlCharInRange(unsigned int val, const xmlChRangeGroup *group); + + +/** + * xmlIsBaseChar_ch: + * @c: char to validate + * + * Automatically generated by genChRanges.py + */ +#define xmlIsBaseChar_ch(c) (((0x41 <= (c)) && ((c) <= 0x5a)) || \ + ((0x61 <= (c)) && ((c) <= 0x7a)) || \ + ((0xc0 <= (c)) && ((c) <= 0xd6)) || \ + ((0xd8 <= (c)) && ((c) <= 0xf6)) || \ + (0xf8 <= (c))) + +/** + * xmlIsBaseCharQ: + * @c: char to validate + * + * Automatically generated by genChRanges.py + */ +#define xmlIsBaseCharQ(c) (((c) < 0x100) ? \ + xmlIsBaseChar_ch((c)) : \ + xmlCharInRange((c), &xmlIsBaseCharGroup)) + +XMLPUBVAR const xmlChRangeGroup xmlIsBaseCharGroup; + +/** + * xmlIsBlank_ch: + * @c: char to validate + * + * Automatically generated by genChRanges.py + */ +#define xmlIsBlank_ch(c) (((c) == 0x20) || \ + ((0x9 <= (c)) && ((c) <= 0xa)) || \ + ((c) == 0xd)) + +/** + * xmlIsBlankQ: + * @c: char to validate + * + * Automatically generated by genChRanges.py + */ +#define xmlIsBlankQ(c) (((c) < 0x100) ? \ + xmlIsBlank_ch((c)) : 0) + + +/** + * xmlIsChar_ch: + * @c: char to validate + * + * Automatically generated by genChRanges.py + */ +#define xmlIsChar_ch(c) (((0x9 <= (c)) && ((c) <= 0xa)) || \ + ((c) == 0xd) || \ + (0x20 <= (c))) + +/** + * xmlIsCharQ: + * @c: char to validate + * + * Automatically generated by genChRanges.py + */ +#define xmlIsCharQ(c) (((c) < 0x100) ? 
\ + xmlIsChar_ch((c)) :\ + (((0x100 <= (c)) && ((c) <= 0xd7ff)) || \ + ((0xe000 <= (c)) && ((c) <= 0xfffd)) || \ + ((0x10000 <= (c)) && ((c) <= 0x10ffff)))) + +XMLPUBVAR const xmlChRangeGroup xmlIsCharGroup; + +/** + * xmlIsCombiningQ: + * @c: char to validate + * + * Automatically generated by genChRanges.py + */ +#define xmlIsCombiningQ(c) (((c) < 0x100) ? \ + 0 : \ + xmlCharInRange((c), &xmlIsCombiningGroup)) + +XMLPUBVAR const xmlChRangeGroup xmlIsCombiningGroup; + +/** + * xmlIsDigit_ch: + * @c: char to validate + * + * Automatically generated by genChRanges.py + */ +#define xmlIsDigit_ch(c) (((0x30 <= (c)) && ((c) <= 0x39))) + +/** + * xmlIsDigitQ: + * @c: char to validate + * + * Automatically generated by genChRanges.py + */ +#define xmlIsDigitQ(c) (((c) < 0x100) ? \ + xmlIsDigit_ch((c)) : \ + xmlCharInRange((c), &xmlIsDigitGroup)) + +XMLPUBVAR const xmlChRangeGroup xmlIsDigitGroup; + +/** + * xmlIsExtender_ch: + * @c: char to validate + * + * Automatically generated by genChRanges.py + */ +#define xmlIsExtender_ch(c) (((c) == 0xb7)) + +/** + * xmlIsExtenderQ: + * @c: char to validate + * + * Automatically generated by genChRanges.py + */ +#define xmlIsExtenderQ(c) (((c) < 0x100) ? \ + xmlIsExtender_ch((c)) : \ + xmlCharInRange((c), &xmlIsExtenderGroup)) + +XMLPUBVAR const xmlChRangeGroup xmlIsExtenderGroup; + +/** + * xmlIsIdeographicQ: + * @c: char to validate + * + * Automatically generated by genChRanges.py + */ +#define xmlIsIdeographicQ(c) (((c) < 0x100) ? \ + 0 :\ + (((0x4e00 <= (c)) && ((c) <= 0x9fa5)) || \ + ((c) == 0x3007) || \ + ((0x3021 <= (c)) && ((c) <= 0x3029)))) + +XMLPUBVAR const xmlChRangeGroup xmlIsIdeographicGroup; +XMLPUBVAR const unsigned char xmlIsPubidChar_tab[256]; + +/** + * xmlIsPubidChar_ch: + * @c: char to validate + * + * Automatically generated by genChRanges.py + */ +#define xmlIsPubidChar_ch(c) (xmlIsPubidChar_tab[(c)]) + +/** + * xmlIsPubidCharQ: + * @c: char to validate + * + * Automatically generated by genChRanges.py + */ +#define xmlIsPubidCharQ(c) (((c) < 0x100) ? \ + xmlIsPubidChar_ch((c)) : 0) + +XMLPUBFUN int + xmlIsBaseChar(unsigned int ch); +XMLPUBFUN int + xmlIsBlank(unsigned int ch); +XMLPUBFUN int + xmlIsChar(unsigned int ch); +XMLPUBFUN int + xmlIsCombining(unsigned int ch); +XMLPUBFUN int + xmlIsDigit(unsigned int ch); +XMLPUBFUN int + xmlIsExtender(unsigned int ch); +XMLPUBFUN int + xmlIsIdeographic(unsigned int ch); +XMLPUBFUN int + xmlIsPubidChar(unsigned int ch); + +#ifdef __cplusplus +} +#endif +#endif /* __XML_CHVALID_H__ */ diff --git a/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/include/libxml2/libxml/debugXML.h b/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/include/libxml2/libxml/debugXML.h new file mode 100644 index 000000000..1332dd73d --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/include/libxml2/libxml/debugXML.h @@ -0,0 +1,217 @@ +/* + * Summary: Tree debugging APIs + * Description: Interfaces to a set of routines used for debugging the tree + * produced by the XML parser. + * + * Copy: See Copyright for the status of this software. + * + * Author: Daniel Veillard + */ + +#ifndef __DEBUG_XML__ +#define __DEBUG_XML__ +#include +#include +#include + +#ifdef LIBXML_DEBUG_ENABLED + +#include + +#ifdef __cplusplus +extern "C" { +#endif + +/* + * The standard Dump routines. 
+ */ +XMLPUBFUN void + xmlDebugDumpString (FILE *output, + const xmlChar *str); +XMLPUBFUN void + xmlDebugDumpAttr (FILE *output, + xmlAttrPtr attr, + int depth); +XMLPUBFUN void + xmlDebugDumpAttrList (FILE *output, + xmlAttrPtr attr, + int depth); +XMLPUBFUN void + xmlDebugDumpOneNode (FILE *output, + xmlNodePtr node, + int depth); +XMLPUBFUN void + xmlDebugDumpNode (FILE *output, + xmlNodePtr node, + int depth); +XMLPUBFUN void + xmlDebugDumpNodeList (FILE *output, + xmlNodePtr node, + int depth); +XMLPUBFUN void + xmlDebugDumpDocumentHead(FILE *output, + xmlDocPtr doc); +XMLPUBFUN void + xmlDebugDumpDocument (FILE *output, + xmlDocPtr doc); +XMLPUBFUN void + xmlDebugDumpDTD (FILE *output, + xmlDtdPtr dtd); +XMLPUBFUN void + xmlDebugDumpEntities (FILE *output, + xmlDocPtr doc); + +/**************************************************************** + * * + * Checking routines * + * * + ****************************************************************/ + +XMLPUBFUN int + xmlDebugCheckDocument (FILE * output, + xmlDocPtr doc); + +/**************************************************************** + * * + * XML shell helpers * + * * + ****************************************************************/ + +XMLPUBFUN void + xmlLsOneNode (FILE *output, xmlNodePtr node); +XMLPUBFUN int + xmlLsCountNode (xmlNodePtr node); + +XMLPUBFUN const char * + xmlBoolToText (int boolval); + +/**************************************************************** + * * + * The XML shell related structures and functions * + * * + ****************************************************************/ + +#ifdef LIBXML_XPATH_ENABLED +/** + * xmlShellReadlineFunc: + * @prompt: a string prompt + * + * This is a generic signature for the XML shell input function. + * + * Returns a string which will be freed by the Shell. + */ +typedef char * (* xmlShellReadlineFunc)(char *prompt); + +/** + * xmlShellCtxt: + * + * A debugging shell context. + * TODO: add the defined function tables. + */ +typedef struct _xmlShellCtxt xmlShellCtxt; +typedef xmlShellCtxt *xmlShellCtxtPtr; +struct _xmlShellCtxt { + char *filename; + xmlDocPtr doc; + xmlNodePtr node; + xmlXPathContextPtr pctxt; + int loaded; + FILE *output; + xmlShellReadlineFunc input; +}; + +/** + * xmlShellCmd: + * @ctxt: a shell context + * @arg: a string argument + * @node: a first node + * @node2: a second node + * + * This is a generic signature for the XML shell functions. + * + * Returns an int, negative returns indicating errors. 
+ */ +typedef int (* xmlShellCmd) (xmlShellCtxtPtr ctxt, + char *arg, + xmlNodePtr node, + xmlNodePtr node2); + +XMLPUBFUN void + xmlShellPrintXPathError (int errorType, + const char *arg); +XMLPUBFUN void + xmlShellPrintXPathResult(xmlXPathObjectPtr list); +XMLPUBFUN int + xmlShellList (xmlShellCtxtPtr ctxt, + char *arg, + xmlNodePtr node, + xmlNodePtr node2); +XMLPUBFUN int + xmlShellBase (xmlShellCtxtPtr ctxt, + char *arg, + xmlNodePtr node, + xmlNodePtr node2); +XMLPUBFUN int + xmlShellDir (xmlShellCtxtPtr ctxt, + char *arg, + xmlNodePtr node, + xmlNodePtr node2); +XMLPUBFUN int + xmlShellLoad (xmlShellCtxtPtr ctxt, + char *filename, + xmlNodePtr node, + xmlNodePtr node2); +#ifdef LIBXML_OUTPUT_ENABLED +XMLPUBFUN void + xmlShellPrintNode (xmlNodePtr node); +XMLPUBFUN int + xmlShellCat (xmlShellCtxtPtr ctxt, + char *arg, + xmlNodePtr node, + xmlNodePtr node2); +XMLPUBFUN int + xmlShellWrite (xmlShellCtxtPtr ctxt, + char *filename, + xmlNodePtr node, + xmlNodePtr node2); +XMLPUBFUN int + xmlShellSave (xmlShellCtxtPtr ctxt, + char *filename, + xmlNodePtr node, + xmlNodePtr node2); +#endif /* LIBXML_OUTPUT_ENABLED */ +#ifdef LIBXML_VALID_ENABLED +XMLPUBFUN int + xmlShellValidate (xmlShellCtxtPtr ctxt, + char *dtd, + xmlNodePtr node, + xmlNodePtr node2); +#endif /* LIBXML_VALID_ENABLED */ +XMLPUBFUN int + xmlShellDu (xmlShellCtxtPtr ctxt, + char *arg, + xmlNodePtr tree, + xmlNodePtr node2); +XMLPUBFUN int + xmlShellPwd (xmlShellCtxtPtr ctxt, + char *buffer, + xmlNodePtr node, + xmlNodePtr node2); + +/* + * The Shell interface. + */ +XMLPUBFUN void + xmlShell (xmlDocPtr doc, + const char *filename, + xmlShellReadlineFunc input, + FILE *output); + +#endif /* LIBXML_XPATH_ENABLED */ + +#ifdef __cplusplus +} +#endif + +#endif /* LIBXML_DEBUG_ENABLED */ +#endif /* __DEBUG_XML__ */ diff --git a/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/include/libxml2/libxml/dict.h b/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/include/libxml2/libxml/dict.h new file mode 100644 index 000000000..22aa3d9db --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/include/libxml2/libxml/dict.h @@ -0,0 +1,82 @@ +/* + * Summary: string dictionary + * Description: dictionary of reusable strings, just used to avoid allocation + * and freeing operations. + * + * Copy: See Copyright for the status of this software. + * + * Author: Daniel Veillard + */ + +#ifndef __XML_DICT_H__ +#define __XML_DICT_H__ + +#include +#include +#include + +#ifdef __cplusplus +extern "C" { +#endif + +/* + * The dictionary. + */ +typedef struct _xmlDict xmlDict; +typedef xmlDict *xmlDictPtr; + +/* + * Initializer + */ +XML_DEPRECATED +XMLPUBFUN int xmlInitializeDict(void); + +/* + * Constructor and destructor. + */ +XMLPUBFUN xmlDictPtr + xmlDictCreate (void); +XMLPUBFUN size_t + xmlDictSetLimit (xmlDictPtr dict, + size_t limit); +XMLPUBFUN size_t + xmlDictGetUsage (xmlDictPtr dict); +XMLPUBFUN xmlDictPtr + xmlDictCreateSub(xmlDictPtr sub); +XMLPUBFUN int + xmlDictReference(xmlDictPtr dict); +XMLPUBFUN void + xmlDictFree (xmlDictPtr dict); + +/* + * Lookup of entry in the dictionary. 
+ */ +XMLPUBFUN const xmlChar * + xmlDictLookup (xmlDictPtr dict, + const xmlChar *name, + int len); +XMLPUBFUN const xmlChar * + xmlDictExists (xmlDictPtr dict, + const xmlChar *name, + int len); +XMLPUBFUN const xmlChar * + xmlDictQLookup (xmlDictPtr dict, + const xmlChar *prefix, + const xmlChar *name); +XMLPUBFUN int + xmlDictOwns (xmlDictPtr dict, + const xmlChar *str); +XMLPUBFUN int + xmlDictSize (xmlDictPtr dict); + +/* + * Cleanup function + */ +XML_DEPRECATED +XMLPUBFUN void + xmlDictCleanup (void); + +#ifdef __cplusplus +} +#endif +#endif /* ! __XML_DICT_H__ */ diff --git a/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/include/libxml2/libxml/encoding.h b/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/include/libxml2/libxml/encoding.h new file mode 100644 index 000000000..599a03e12 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/include/libxml2/libxml/encoding.h @@ -0,0 +1,244 @@ +/* + * Summary: interface for the encoding conversion functions + * Description: interface for the encoding conversion functions needed for + * XML basic encoding and iconv() support. + * + * Related specs are + * rfc2044 (UTF-8 and UTF-16) F. Yergeau Alis Technologies + * [ISO-10646] UTF-8 and UTF-16 in Annexes + * [ISO-8859-1] ISO Latin-1 characters codes. + * [UNICODE] The Unicode Consortium, "The Unicode Standard -- + * Worldwide Character Encoding -- Version 1.0", Addison- + * Wesley, Volume 1, 1991, Volume 2, 1992. UTF-8 is + * described in Unicode Technical Report #4. + * [US-ASCII] Coded Character Set--7-bit American Standard Code for + * Information Interchange, ANSI X3.4-1986. + * + * Copy: See Copyright for the status of this software. + * + * Author: Daniel Veillard + */ + +#ifndef __XML_CHAR_ENCODING_H__ +#define __XML_CHAR_ENCODING_H__ + +#include + +#ifdef LIBXML_ICONV_ENABLED +#include +#endif + +#ifdef __cplusplus +extern "C" { +#endif + +typedef enum { + XML_ENC_ERR_SUCCESS = 0, + XML_ENC_ERR_SPACE = -1, + XML_ENC_ERR_INPUT = -2, + XML_ENC_ERR_PARTIAL = -3, + XML_ENC_ERR_INTERNAL = -4, + XML_ENC_ERR_MEMORY = -5 +} xmlCharEncError; + +/* + * xmlCharEncoding: + * + * Predefined values for some standard encodings. + * Libxml does not do beforehand translation on UTF8 and ISOLatinX. + * It also supports ASCII, ISO-8859-1, and UTF16 (LE and BE) by default. + * + * Anything else would have to be translated to UTF8 before being + * given to the parser itself. The BOM for UTF16 and the encoding + * declaration are looked at and a converter is looked for at that + * point. If not found the parser stops here as asked by the XML REC. A + * converter can be registered by the user using xmlRegisterCharEncodingHandler + * but the current form doesn't allow stateful transcoding (a serious + * problem agreed !). If iconv has been found it will be used + * automatically and allow stateful transcoding, the simplest is then + * to be sure to enable iconv and to provide iconv libs for the encoding + * support needed. + * + * Note that the generic "UTF-16" is not a predefined value. Instead, only + * the specific UTF-16LE and UTF-16BE are present. 
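/*
 * A minimal round trip through the xmlDict API declared in dict.h above:
 * interned strings compare equal by pointer and are owned by the
 * dictionary, so they are released all at once by xmlDictFree().
 * Illustration only, not part of the vendored header.
 */
#include <stdio.h>
#include <libxml/dict.h>

static void dict_demo(void) {
    xmlDictPtr dict = xmlDictCreate();
    if (dict == NULL)
        return;
    /* a length of -1 means the string is zero-terminated */
    const xmlChar *a = xmlDictLookup(dict, (const xmlChar *) "name", -1);
    const xmlChar *b = xmlDictLookup(dict, (const xmlChar *) "name", -1);
    printf("same pointer: %d, entries: %d, owned: %d\n",
           a == b, xmlDictSize(dict), xmlDictOwns(dict, a));
    xmlDictFree(dict);
}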
+ */ +typedef enum { + XML_CHAR_ENCODING_ERROR= -1, /* No char encoding detected */ + XML_CHAR_ENCODING_NONE= 0, /* No char encoding detected */ + XML_CHAR_ENCODING_UTF8= 1, /* UTF-8 */ + XML_CHAR_ENCODING_UTF16LE= 2, /* UTF-16 little endian */ + XML_CHAR_ENCODING_UTF16BE= 3, /* UTF-16 big endian */ + XML_CHAR_ENCODING_UCS4LE= 4, /* UCS-4 little endian */ + XML_CHAR_ENCODING_UCS4BE= 5, /* UCS-4 big endian */ + XML_CHAR_ENCODING_EBCDIC= 6, /* EBCDIC uh! */ + XML_CHAR_ENCODING_UCS4_2143=7, /* UCS-4 unusual ordering */ + XML_CHAR_ENCODING_UCS4_3412=8, /* UCS-4 unusual ordering */ + XML_CHAR_ENCODING_UCS2= 9, /* UCS-2 */ + XML_CHAR_ENCODING_8859_1= 10,/* ISO-8859-1 ISO Latin 1 */ + XML_CHAR_ENCODING_8859_2= 11,/* ISO-8859-2 ISO Latin 2 */ + XML_CHAR_ENCODING_8859_3= 12,/* ISO-8859-3 */ + XML_CHAR_ENCODING_8859_4= 13,/* ISO-8859-4 */ + XML_CHAR_ENCODING_8859_5= 14,/* ISO-8859-5 */ + XML_CHAR_ENCODING_8859_6= 15,/* ISO-8859-6 */ + XML_CHAR_ENCODING_8859_7= 16,/* ISO-8859-7 */ + XML_CHAR_ENCODING_8859_8= 17,/* ISO-8859-8 */ + XML_CHAR_ENCODING_8859_9= 18,/* ISO-8859-9 */ + XML_CHAR_ENCODING_2022_JP= 19,/* ISO-2022-JP */ + XML_CHAR_ENCODING_SHIFT_JIS=20,/* Shift_JIS */ + XML_CHAR_ENCODING_EUC_JP= 21,/* EUC-JP */ + XML_CHAR_ENCODING_ASCII= 22 /* pure ASCII */ +} xmlCharEncoding; + +/** + * xmlCharEncodingInputFunc: + * @out: a pointer to an array of bytes to store the UTF-8 result + * @outlen: the length of @out + * @in: a pointer to an array of chars in the original encoding + * @inlen: the length of @in + * + * Take a block of chars in the original encoding and try to convert + * it to an UTF-8 block of chars out. + * + * Returns the number of bytes written, -1 if lack of space, or -2 + * if the transcoding failed. + * The value of @inlen after return is the number of octets consumed + * if the return value is positive, else unpredictiable. + * The value of @outlen after return is the number of octets consumed. + */ +typedef int (* xmlCharEncodingInputFunc)(unsigned char *out, int *outlen, + const unsigned char *in, int *inlen); + + +/** + * xmlCharEncodingOutputFunc: + * @out: a pointer to an array of bytes to store the result + * @outlen: the length of @out + * @in: a pointer to an array of UTF-8 chars + * @inlen: the length of @in + * + * Take a block of UTF-8 chars in and try to convert it to another + * encoding. + * Note: a first call designed to produce heading info is called with + * in = NULL. If stateful this should also initialize the encoder state. + * + * Returns the number of bytes written, -1 if lack of space, or -2 + * if the transcoding failed. + * The value of @inlen after return is the number of octets consumed + * if the return value is positive, else unpredictiable. + * The value of @outlen after return is the number of octets produced. + */ +typedef int (* xmlCharEncodingOutputFunc)(unsigned char *out, int *outlen, + const unsigned char *in, int *inlen); + + +/* + * Block defining the handlers for non UTF-8 encodings. + * If iconv is supported, there are two extra fields. 
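/*
 * A trivial converter matching the xmlCharEncodingInputFunc contract
 * documented above: 7-bit ASCII bytes are copied through unchanged
 * (ASCII is a subset of UTF-8), *inlen is set to the octets consumed,
 * *outlen to the octets produced, and -2 is returned on a byte that
 * cannot be transcoded. Such a function could be registered through
 * xmlNewCharEncodingHandler(), declared further below. Illustration
 * only, not part of the vendored header.
 */
static int asciiToUTF8Sketch(unsigned char *out, int *outlen,
                             const unsigned char *in, int *inlen) {
    int produced = 0, consumed = 0;

    while (consumed < *inlen && produced < *outlen) {
        if (in[consumed] >= 0x80) {          /* not ASCII: transcoding fails */
            *inlen = consumed;
            *outlen = produced;
            return -2;
        }
        out[produced++] = in[consumed++];
    }
    *inlen = consumed;
    *outlen = produced;
    return produced;                         /* number of bytes written */
}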
+ */ +typedef struct _xmlCharEncodingHandler xmlCharEncodingHandler; +typedef xmlCharEncodingHandler *xmlCharEncodingHandlerPtr; +struct _xmlCharEncodingHandler { + char *name; + xmlCharEncodingInputFunc input; + xmlCharEncodingOutputFunc output; +#ifdef LIBXML_ICONV_ENABLED + iconv_t iconv_in; + iconv_t iconv_out; +#endif /* LIBXML_ICONV_ENABLED */ +#ifdef LIBXML_ICU_ENABLED + struct _uconv_t *uconv_in; + struct _uconv_t *uconv_out; +#endif /* LIBXML_ICU_ENABLED */ +}; + +/* + * Interfaces for encoding handlers. + */ +XML_DEPRECATED +XMLPUBFUN void + xmlInitCharEncodingHandlers (void); +XML_DEPRECATED +XMLPUBFUN void + xmlCleanupCharEncodingHandlers (void); +XMLPUBFUN void + xmlRegisterCharEncodingHandler (xmlCharEncodingHandlerPtr handler); +XMLPUBFUN int + xmlLookupCharEncodingHandler (xmlCharEncoding enc, + xmlCharEncodingHandlerPtr *out); +XMLPUBFUN int + xmlOpenCharEncodingHandler (const char *name, + int output, + xmlCharEncodingHandlerPtr *out); +XMLPUBFUN xmlCharEncodingHandlerPtr + xmlGetCharEncodingHandler (xmlCharEncoding enc); +XMLPUBFUN xmlCharEncodingHandlerPtr + xmlFindCharEncodingHandler (const char *name); +XMLPUBFUN xmlCharEncodingHandlerPtr + xmlNewCharEncodingHandler (const char *name, + xmlCharEncodingInputFunc input, + xmlCharEncodingOutputFunc output); + +/* + * Interfaces for encoding names and aliases. + */ +XMLPUBFUN int + xmlAddEncodingAlias (const char *name, + const char *alias); +XMLPUBFUN int + xmlDelEncodingAlias (const char *alias); +XMLPUBFUN const char * + xmlGetEncodingAlias (const char *alias); +XMLPUBFUN void + xmlCleanupEncodingAliases (void); +XMLPUBFUN xmlCharEncoding + xmlParseCharEncoding (const char *name); +XMLPUBFUN const char * + xmlGetCharEncodingName (xmlCharEncoding enc); + +/* + * Interfaces directly used by the parsers. + */ +XMLPUBFUN xmlCharEncoding + xmlDetectCharEncoding (const unsigned char *in, + int len); + +/** DOC_DISABLE */ +struct _xmlBuffer; +/** DOC_ENABLE */ +XMLPUBFUN int + xmlCharEncOutFunc (xmlCharEncodingHandler *handler, + struct _xmlBuffer *out, + struct _xmlBuffer *in); + +XMLPUBFUN int + xmlCharEncInFunc (xmlCharEncodingHandler *handler, + struct _xmlBuffer *out, + struct _xmlBuffer *in); +XML_DEPRECATED +XMLPUBFUN int + xmlCharEncFirstLine (xmlCharEncodingHandler *handler, + struct _xmlBuffer *out, + struct _xmlBuffer *in); +XMLPUBFUN int + xmlCharEncCloseFunc (xmlCharEncodingHandler *handler); + +/* + * Export a few useful functions + */ +#ifdef LIBXML_OUTPUT_ENABLED +XMLPUBFUN int + UTF8Toisolat1 (unsigned char *out, + int *outlen, + const unsigned char *in, + int *inlen); +#endif /* LIBXML_OUTPUT_ENABLED */ +XMLPUBFUN int + isolat1ToUTF8 (unsigned char *out, + int *outlen, + const unsigned char *in, + int *inlen); +#ifdef __cplusplus +} +#endif + +#endif /* __XML_CHAR_ENCODING_H__ */ diff --git a/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/include/libxml2/libxml/entities.h b/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/include/libxml2/libxml/entities.h new file mode 100644 index 000000000..a0cfca813 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/include/libxml2/libxml/entities.h @@ -0,0 +1,166 @@ +/* + * Summary: interface for the XML entities handling + * Description: this module provides some of the entity API needed + * for the parser and applications. + * + * Copy: See Copyright for the status of this software. 
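/*
 * Sketch of the simplest conversion entry points declared above:
 * ISO-8859-1 input is converted to UTF-8 with isolat1ToUTF8(), and a
 * named handler is looked up and released. Illustration only, not part
 * of the vendored header.
 */
#include <stdio.h>
#include <libxml/encoding.h>

static void encoding_demo(void) {
    const unsigned char latin1[] = { 'c', 'a', 'f', 0xE9 }; /* "cafe" + e-acute */
    unsigned char utf8[16];
    int inlen = (int) sizeof(latin1);
    int outlen = (int) sizeof(utf8);

    if (isolat1ToUTF8(utf8, &outlen, latin1, &inlen) >= 0)
        printf("consumed %d Latin-1 bytes, produced %d UTF-8 bytes\n",
               inlen, outlen);

    xmlCharEncodingHandlerPtr h = xmlFindCharEncodingHandler("ISO-8859-1");
    if (h != NULL)
        xmlCharEncCloseFunc(h);  /* release the handler when done */
}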
+ * + * Author: Daniel Veillard + */ + +#ifndef __XML_ENTITIES_H__ +#define __XML_ENTITIES_H__ + +/** DOC_DISABLE */ +#include +#define XML_TREE_INTERNALS +#include +#undef XML_TREE_INTERNALS +/** DOC_ENABLE */ + +#ifdef __cplusplus +extern "C" { +#endif + +/* + * The different valid entity types. + */ +typedef enum { + XML_INTERNAL_GENERAL_ENTITY = 1, + XML_EXTERNAL_GENERAL_PARSED_ENTITY = 2, + XML_EXTERNAL_GENERAL_UNPARSED_ENTITY = 3, + XML_INTERNAL_PARAMETER_ENTITY = 4, + XML_EXTERNAL_PARAMETER_ENTITY = 5, + XML_INTERNAL_PREDEFINED_ENTITY = 6 +} xmlEntityType; + +/* + * An unit of storage for an entity, contains the string, the value + * and the linkind data needed for the linking in the hash table. + */ + +struct _xmlEntity { + void *_private; /* application data */ + xmlElementType type; /* XML_ENTITY_DECL, must be second ! */ + const xmlChar *name; /* Entity name */ + struct _xmlNode *children; /* First child link */ + struct _xmlNode *last; /* Last child link */ + struct _xmlDtd *parent; /* -> DTD */ + struct _xmlNode *next; /* next sibling link */ + struct _xmlNode *prev; /* previous sibling link */ + struct _xmlDoc *doc; /* the containing document */ + + xmlChar *orig; /* content without ref substitution */ + xmlChar *content; /* content or ndata if unparsed */ + int length; /* the content length */ + xmlEntityType etype; /* The entity type */ + const xmlChar *ExternalID; /* External identifier for PUBLIC */ + const xmlChar *SystemID; /* URI for a SYSTEM or PUBLIC Entity */ + + struct _xmlEntity *nexte; /* unused */ + const xmlChar *URI; /* the full URI as computed */ + int owner; /* unused */ + int flags; /* various flags */ + unsigned long expandedSize; /* expanded size */ +}; + +/* + * All entities are stored in an hash table. + * There is 2 separate hash tables for global and parameter entities. 
+ */ + +typedef struct _xmlHashTable xmlEntitiesTable; +typedef xmlEntitiesTable *xmlEntitiesTablePtr; + +/* + * External functions: + */ + +#ifdef LIBXML_LEGACY_ENABLED +XML_DEPRECATED +XMLPUBFUN void + xmlInitializePredefinedEntities (void); +#endif /* LIBXML_LEGACY_ENABLED */ + +XMLPUBFUN xmlEntityPtr + xmlNewEntity (xmlDocPtr doc, + const xmlChar *name, + int type, + const xmlChar *ExternalID, + const xmlChar *SystemID, + const xmlChar *content); +XMLPUBFUN void + xmlFreeEntity (xmlEntityPtr entity); +XMLPUBFUN int + xmlAddEntity (xmlDocPtr doc, + int extSubset, + const xmlChar *name, + int type, + const xmlChar *ExternalID, + const xmlChar *SystemID, + const xmlChar *content, + xmlEntityPtr *out); +XMLPUBFUN xmlEntityPtr + xmlAddDocEntity (xmlDocPtr doc, + const xmlChar *name, + int type, + const xmlChar *ExternalID, + const xmlChar *SystemID, + const xmlChar *content); +XMLPUBFUN xmlEntityPtr + xmlAddDtdEntity (xmlDocPtr doc, + const xmlChar *name, + int type, + const xmlChar *ExternalID, + const xmlChar *SystemID, + const xmlChar *content); +XMLPUBFUN xmlEntityPtr + xmlGetPredefinedEntity (const xmlChar *name); +XMLPUBFUN xmlEntityPtr + xmlGetDocEntity (const xmlDoc *doc, + const xmlChar *name); +XMLPUBFUN xmlEntityPtr + xmlGetDtdEntity (xmlDocPtr doc, + const xmlChar *name); +XMLPUBFUN xmlEntityPtr + xmlGetParameterEntity (xmlDocPtr doc, + const xmlChar *name); +#ifdef LIBXML_LEGACY_ENABLED +XML_DEPRECATED +XMLPUBFUN const xmlChar * + xmlEncodeEntities (xmlDocPtr doc, + const xmlChar *input); +#endif /* LIBXML_LEGACY_ENABLED */ +XMLPUBFUN xmlChar * + xmlEncodeEntitiesReentrant(xmlDocPtr doc, + const xmlChar *input); +XMLPUBFUN xmlChar * + xmlEncodeSpecialChars (const xmlDoc *doc, + const xmlChar *input); +XMLPUBFUN xmlEntitiesTablePtr + xmlCreateEntitiesTable (void); +#ifdef LIBXML_TREE_ENABLED +XMLPUBFUN xmlEntitiesTablePtr + xmlCopyEntitiesTable (xmlEntitiesTablePtr table); +#endif /* LIBXML_TREE_ENABLED */ +XMLPUBFUN void + xmlFreeEntitiesTable (xmlEntitiesTablePtr table); +#ifdef LIBXML_OUTPUT_ENABLED +XMLPUBFUN void + xmlDumpEntitiesTable (xmlBufferPtr buf, + xmlEntitiesTablePtr table); +XMLPUBFUN void + xmlDumpEntityDecl (xmlBufferPtr buf, + xmlEntityPtr ent); +#endif /* LIBXML_OUTPUT_ENABLED */ +#ifdef LIBXML_LEGACY_ENABLED +XMLPUBFUN void + xmlCleanupPredefinedEntities(void); +#endif /* LIBXML_LEGACY_ENABLED */ + + +#ifdef __cplusplus +} +#endif + +# endif /* __XML_ENTITIES_H__ */ diff --git a/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/include/libxml2/libxml/globals.h b/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/include/libxml2/libxml/globals.h new file mode 100644 index 000000000..92f41312f --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/include/libxml2/libxml/globals.h @@ -0,0 +1,41 @@ +/* + * Summary: interface for all global variables of the library + * Description: Deprecated, don't use + * + * Copy: See Copyright for the status of this software. + */ + +#ifndef __XML_GLOBALS_H +#define __XML_GLOBALS_H + +#include + +/* + * This file was required to access global variables until version v2.12.0. + * + * These includes are for backward compatibility. 
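/*
 * Two common operations from the entities API declared above: escaping
 * markup-significant characters in text and looking up a predefined
 * entity. xmlNewDoc(), xmlFreeDoc() and xmlFree() come from other
 * libxml2 headers (tree.h / xmlmemory.h) and are assumed here.
 * Illustration only, not part of the vendored header.
 */
#include <stdio.h>
#include <libxml/tree.h>
#include <libxml/entities.h>

static void entities_demo(void) {
    xmlDocPtr doc = xmlNewDoc((const xmlChar *) "1.0");
    xmlChar *escaped = xmlEncodeSpecialChars(doc, (const xmlChar *) "a < b & c");
    if (escaped != NULL) {
        printf("%s\n", escaped);              /* a &lt; b &amp; c */
        xmlFree(escaped);
    }

    xmlEntityPtr amp = xmlGetPredefinedEntity((const xmlChar *) "amp");
    if (amp != NULL)
        printf("&%s; expands to %s\n", amp->name, amp->content);

    xmlFreeDoc(doc);
}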
+ */ +#include +#include +#include +#include +#include +#include + +#ifdef __cplusplus +extern "C" { +#endif + +typedef struct _xmlGlobalState xmlGlobalState; +typedef xmlGlobalState *xmlGlobalStatePtr; + +XML_DEPRECATED XMLPUBFUN void +xmlInitializeGlobalState(xmlGlobalStatePtr gs); +XML_DEPRECATED XMLPUBFUN +xmlGlobalStatePtr xmlGetGlobalState(void); + +#ifdef __cplusplus +} +#endif + +#endif /* __XML_GLOBALS_H */ diff --git a/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/include/libxml2/libxml/hash.h b/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/include/libxml2/libxml/hash.h new file mode 100644 index 000000000..135b69669 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/include/libxml2/libxml/hash.h @@ -0,0 +1,251 @@ +/* + * Summary: Chained hash tables + * Description: This module implements the hash table support used in + * various places in the library. + * + * Copy: See Copyright for the status of this software. + * + * Author: Bjorn Reese + */ + +#ifndef __XML_HASH_H__ +#define __XML_HASH_H__ + +#include +#include +#include + +#ifdef __cplusplus +extern "C" { +#endif + +/* + * The hash table. + */ +typedef struct _xmlHashTable xmlHashTable; +typedef xmlHashTable *xmlHashTablePtr; + +/* + * Recent version of gcc produce a warning when a function pointer is assigned + * to an object pointer, or vice versa. The following macro is a dirty hack + * to allow suppression of the warning. If your architecture has function + * pointers which are a different size than a void pointer, there may be some + * serious trouble within the library. + */ +/** + * XML_CAST_FPTR: + * @fptr: pointer to a function + * + * Macro to do a casting from an object pointer to a + * function pointer without encountering a warning from + * gcc + * + * #define XML_CAST_FPTR(fptr) (*(void **)(&fptr)) + * This macro violated ISO C aliasing rules (gcc4 on s390 broke) + * so it is disabled now + */ + +#define XML_CAST_FPTR(fptr) fptr + +/* + * function types: + */ +/** + * xmlHashDeallocator: + * @payload: the data in the hash + * @name: the name associated + * + * Callback to free data from a hash. + */ +typedef void (*xmlHashDeallocator)(void *payload, const xmlChar *name); +/** + * xmlHashCopier: + * @payload: the data in the hash + * @name: the name associated + * + * Callback to copy data from a hash. + * + * Returns a copy of the data or NULL in case of error. + */ +typedef void *(*xmlHashCopier)(void *payload, const xmlChar *name); +/** + * xmlHashScanner: + * @payload: the data in the hash + * @data: extra scanner data + * @name: the name associated + * + * Callback when scanning data in a hash with the simple scanner. + */ +typedef void (*xmlHashScanner)(void *payload, void *data, const xmlChar *name); +/** + * xmlHashScannerFull: + * @payload: the data in the hash + * @data: extra scanner data + * @name: the name associated + * @name2: the second name associated + * @name3: the third name associated + * + * Callback when scanning data in a hash with the full scanner. + */ +typedef void (*xmlHashScannerFull)(void *payload, void *data, + const xmlChar *name, const xmlChar *name2, + const xmlChar *name3); + +/* + * Constructor and destructor. 
+ */ +XMLPUBFUN xmlHashTablePtr + xmlHashCreate (int size); +XMLPUBFUN xmlHashTablePtr + xmlHashCreateDict (int size, + xmlDictPtr dict); +XMLPUBFUN void + xmlHashFree (xmlHashTablePtr hash, + xmlHashDeallocator dealloc); +XMLPUBFUN void + xmlHashDefaultDeallocator(void *entry, + const xmlChar *name); + +/* + * Add a new entry to the hash table. + */ +XMLPUBFUN int + xmlHashAdd (xmlHashTablePtr hash, + const xmlChar *name, + void *userdata); +XMLPUBFUN int + xmlHashAddEntry (xmlHashTablePtr hash, + const xmlChar *name, + void *userdata); +XMLPUBFUN int + xmlHashUpdateEntry (xmlHashTablePtr hash, + const xmlChar *name, + void *userdata, + xmlHashDeallocator dealloc); +XMLPUBFUN int + xmlHashAdd2 (xmlHashTablePtr hash, + const xmlChar *name, + const xmlChar *name2, + void *userdata); +XMLPUBFUN int + xmlHashAddEntry2 (xmlHashTablePtr hash, + const xmlChar *name, + const xmlChar *name2, + void *userdata); +XMLPUBFUN int + xmlHashUpdateEntry2 (xmlHashTablePtr hash, + const xmlChar *name, + const xmlChar *name2, + void *userdata, + xmlHashDeallocator dealloc); +XMLPUBFUN int + xmlHashAdd3 (xmlHashTablePtr hash, + const xmlChar *name, + const xmlChar *name2, + const xmlChar *name3, + void *userdata); +XMLPUBFUN int + xmlHashAddEntry3 (xmlHashTablePtr hash, + const xmlChar *name, + const xmlChar *name2, + const xmlChar *name3, + void *userdata); +XMLPUBFUN int + xmlHashUpdateEntry3 (xmlHashTablePtr hash, + const xmlChar *name, + const xmlChar *name2, + const xmlChar *name3, + void *userdata, + xmlHashDeallocator dealloc); + +/* + * Remove an entry from the hash table. + */ +XMLPUBFUN int + xmlHashRemoveEntry (xmlHashTablePtr hash, + const xmlChar *name, + xmlHashDeallocator dealloc); +XMLPUBFUN int + xmlHashRemoveEntry2 (xmlHashTablePtr hash, + const xmlChar *name, + const xmlChar *name2, + xmlHashDeallocator dealloc); +XMLPUBFUN int + xmlHashRemoveEntry3 (xmlHashTablePtr hash, + const xmlChar *name, + const xmlChar *name2, + const xmlChar *name3, + xmlHashDeallocator dealloc); + +/* + * Retrieve the payload. + */ +XMLPUBFUN void * + xmlHashLookup (xmlHashTablePtr hash, + const xmlChar *name); +XMLPUBFUN void * + xmlHashLookup2 (xmlHashTablePtr hash, + const xmlChar *name, + const xmlChar *name2); +XMLPUBFUN void * + xmlHashLookup3 (xmlHashTablePtr hash, + const xmlChar *name, + const xmlChar *name2, + const xmlChar *name3); +XMLPUBFUN void * + xmlHashQLookup (xmlHashTablePtr hash, + const xmlChar *prefix, + const xmlChar *name); +XMLPUBFUN void * + xmlHashQLookup2 (xmlHashTablePtr hash, + const xmlChar *prefix, + const xmlChar *name, + const xmlChar *prefix2, + const xmlChar *name2); +XMLPUBFUN void * + xmlHashQLookup3 (xmlHashTablePtr hash, + const xmlChar *prefix, + const xmlChar *name, + const xmlChar *prefix2, + const xmlChar *name2, + const xmlChar *prefix3, + const xmlChar *name3); + +/* + * Helpers. 
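/*
 * A minimal round trip through the chained hash table declared above:
 * create, insert, look up, and free. The payloads are static strings
 * that the table does not own, so it is freed with a NULL deallocator.
 * Illustration only, not part of the vendored header.
 */
#include <stdio.h>
#include <libxml/hash.h>

static void hash_demo(void) {
    xmlHashTablePtr table = xmlHashCreate(0);  /* 0: let the library size it */
    if (table == NULL)
        return;

    xmlHashAddEntry(table, (const xmlChar *) "lang", (void *) "en");
    xmlHashAddEntry(table, (const xmlChar *) "mode", (void *) "strict");

    const char *mode = xmlHashLookup(table, (const xmlChar *) "mode");
    printf("mode=%s\n", mode ? mode : "(missing)");

    xmlHashFree(table, NULL);                  /* payloads are not freed */
}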
+ */ +XMLPUBFUN xmlHashTablePtr + xmlHashCopySafe (xmlHashTablePtr hash, + xmlHashCopier copy, + xmlHashDeallocator dealloc); +XMLPUBFUN xmlHashTablePtr + xmlHashCopy (xmlHashTablePtr hash, + xmlHashCopier copy); +XMLPUBFUN int + xmlHashSize (xmlHashTablePtr hash); +XMLPUBFUN void + xmlHashScan (xmlHashTablePtr hash, + xmlHashScanner scan, + void *data); +XMLPUBFUN void + xmlHashScan3 (xmlHashTablePtr hash, + const xmlChar *name, + const xmlChar *name2, + const xmlChar *name3, + xmlHashScanner scan, + void *data); +XMLPUBFUN void + xmlHashScanFull (xmlHashTablePtr hash, + xmlHashScannerFull scan, + void *data); +XMLPUBFUN void + xmlHashScanFull3 (xmlHashTablePtr hash, + const xmlChar *name, + const xmlChar *name2, + const xmlChar *name3, + xmlHashScannerFull scan, + void *data); +#ifdef __cplusplus +} +#endif +#endif /* ! __XML_HASH_H__ */ diff --git a/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/include/libxml2/libxml/list.h b/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/include/libxml2/libxml/list.h new file mode 100644 index 000000000..1fa76aff0 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/include/libxml2/libxml/list.h @@ -0,0 +1,137 @@ +/* + * Summary: lists interfaces + * Description: this module implement the list support used in + * various place in the library. + * + * Copy: See Copyright for the status of this software. + * + * Author: Gary Pennington + */ + +#ifndef __XML_LINK_INCLUDE__ +#define __XML_LINK_INCLUDE__ + +#include + +#ifdef __cplusplus +extern "C" { +#endif + +typedef struct _xmlLink xmlLink; +typedef xmlLink *xmlLinkPtr; + +typedef struct _xmlList xmlList; +typedef xmlList *xmlListPtr; + +/** + * xmlListDeallocator: + * @lk: the data to deallocate + * + * Callback function used to free data from a list. + */ +typedef void (*xmlListDeallocator) (xmlLinkPtr lk); +/** + * xmlListDataCompare: + * @data0: the first data + * @data1: the second data + * + * Callback function used to compare 2 data. + * + * Returns 0 is equality, -1 or 1 otherwise depending on the ordering. + */ +typedef int (*xmlListDataCompare) (const void *data0, const void *data1); +/** + * xmlListWalker: + * @data: the data found in the list + * @user: extra user provided data to the walker + * + * Callback function used when walking a list with xmlListWalk(). + * + * Returns 0 to stop walking the list, 1 otherwise. 
+ */ +typedef int (*xmlListWalker) (const void *data, void *user); + +/* Creation/Deletion */ +XMLPUBFUN xmlListPtr + xmlListCreate (xmlListDeallocator deallocator, + xmlListDataCompare compare); +XMLPUBFUN void + xmlListDelete (xmlListPtr l); + +/* Basic Operators */ +XMLPUBFUN void * + xmlListSearch (xmlListPtr l, + void *data); +XMLPUBFUN void * + xmlListReverseSearch (xmlListPtr l, + void *data); +XMLPUBFUN int + xmlListInsert (xmlListPtr l, + void *data) ; +XMLPUBFUN int + xmlListAppend (xmlListPtr l, + void *data) ; +XMLPUBFUN int + xmlListRemoveFirst (xmlListPtr l, + void *data); +XMLPUBFUN int + xmlListRemoveLast (xmlListPtr l, + void *data); +XMLPUBFUN int + xmlListRemoveAll (xmlListPtr l, + void *data); +XMLPUBFUN void + xmlListClear (xmlListPtr l); +XMLPUBFUN int + xmlListEmpty (xmlListPtr l); +XMLPUBFUN xmlLinkPtr + xmlListFront (xmlListPtr l); +XMLPUBFUN xmlLinkPtr + xmlListEnd (xmlListPtr l); +XMLPUBFUN int + xmlListSize (xmlListPtr l); + +XMLPUBFUN void + xmlListPopFront (xmlListPtr l); +XMLPUBFUN void + xmlListPopBack (xmlListPtr l); +XMLPUBFUN int + xmlListPushFront (xmlListPtr l, + void *data); +XMLPUBFUN int + xmlListPushBack (xmlListPtr l, + void *data); + +/* Advanced Operators */ +XMLPUBFUN void + xmlListReverse (xmlListPtr l); +XMLPUBFUN void + xmlListSort (xmlListPtr l); +XMLPUBFUN void + xmlListWalk (xmlListPtr l, + xmlListWalker walker, + void *user); +XMLPUBFUN void + xmlListReverseWalk (xmlListPtr l, + xmlListWalker walker, + void *user); +XMLPUBFUN void + xmlListMerge (xmlListPtr l1, + xmlListPtr l2); +XMLPUBFUN xmlListPtr + xmlListDup (xmlListPtr old); +XMLPUBFUN int + xmlListCopy (xmlListPtr cur, + xmlListPtr old); +/* Link operators */ +XMLPUBFUN void * + xmlLinkGetData (xmlLinkPtr lk); + +/* xmlListUnique() */ +/* xmlListSwap */ + +#ifdef __cplusplus +} +#endif + +#endif /* __XML_LINK_INCLUDE__ */ diff --git a/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/include/libxml2/libxml/nanoftp.h b/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/include/libxml2/libxml/nanoftp.h new file mode 100644 index 000000000..ed3ac4f1f --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/include/libxml2/libxml/nanoftp.h @@ -0,0 +1,186 @@ +/* + * Summary: minimal FTP implementation + * Description: minimal FTP implementation allowing to fetch resources + * like external subset. This module is DEPRECATED, do not + * use any of its functions. + * + * Copy: See Copyright for the status of this software. 
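/*
 * Using the list API declared above with a walker callback: strings are
 * appended, counted, and printed by xmlListWalk(). Per the xmlListWalker
 * contract a walker returns 1 to keep going and 0 to stop. Illustration
 * only, not part of the vendored header.
 */
#include <stdio.h>
#include <libxml/list.h>

static int print_item(const void *data, void *user) {
    (void) user;
    printf("- %s\n", (const char *) data);
    return 1;                                  /* continue walking */
}

static void list_demo(void) {
    /* NULL deallocator/comparator: the list does not own the payloads */
    xmlListPtr l = xmlListCreate(NULL, NULL);
    if (l == NULL)
        return;
    xmlListPushBack(l, (void *) "first");
    xmlListPushBack(l, (void *) "second");
    printf("%d item(s):\n", xmlListSize(l));
    xmlListWalk(l, print_item, NULL);
    xmlListDelete(l);
}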
+ * + * Author: Daniel Veillard + */ + +#ifndef __NANO_FTP_H__ +#define __NANO_FTP_H__ + +#include + +#if defined(LIBXML_FTP_ENABLED) + +/* Needed for portability to Windows 64 bits */ +#if defined(_WIN32) +#include +#else +/** + * SOCKET: + * + * macro used to provide portability of code to windows sockets + */ +#define SOCKET int +/** + * INVALID_SOCKET: + * + * macro used to provide portability of code to windows sockets + * the value to be used when the socket is not valid + */ +#undef INVALID_SOCKET +#define INVALID_SOCKET (-1) +#endif + +#ifdef __cplusplus +extern "C" { +#endif + +/** + * ftpListCallback: + * @userData: user provided data for the callback + * @filename: the file name (including "->" when links are shown) + * @attrib: the attribute string + * @owner: the owner string + * @group: the group string + * @size: the file size + * @links: the link count + * @year: the year + * @month: the month + * @day: the day + * @hour: the hour + * @minute: the minute + * + * A callback for the xmlNanoFTPList command. + * Note that only one of year and day:minute are specified. + */ +typedef void (*ftpListCallback) (void *userData, + const char *filename, const char *attrib, + const char *owner, const char *group, + unsigned long size, int links, int year, + const char *month, int day, int hour, + int minute); +/** + * ftpDataCallback: + * @userData: the user provided context + * @data: the data received + * @len: its size in bytes + * + * A callback for the xmlNanoFTPGet command. + */ +typedef void (*ftpDataCallback) (void *userData, + const char *data, + int len); + +/* + * Init + */ +XML_DEPRECATED +XMLPUBFUN void + xmlNanoFTPInit (void); +XML_DEPRECATED +XMLPUBFUN void + xmlNanoFTPCleanup (void); + +/* + * Creating/freeing contexts. + */ +XML_DEPRECATED +XMLPUBFUN void * + xmlNanoFTPNewCtxt (const char *URL); +XML_DEPRECATED +XMLPUBFUN void + xmlNanoFTPFreeCtxt (void * ctx); +XML_DEPRECATED +XMLPUBFUN void * + xmlNanoFTPConnectTo (const char *server, + int port); +/* + * Opening/closing session connections. + */ +XML_DEPRECATED +XMLPUBFUN void * + xmlNanoFTPOpen (const char *URL); +XML_DEPRECATED +XMLPUBFUN int + xmlNanoFTPConnect (void *ctx); +XML_DEPRECATED +XMLPUBFUN int + xmlNanoFTPClose (void *ctx); +XML_DEPRECATED +XMLPUBFUN int + xmlNanoFTPQuit (void *ctx); +XML_DEPRECATED +XMLPUBFUN void + xmlNanoFTPScanProxy (const char *URL); +XML_DEPRECATED +XMLPUBFUN void + xmlNanoFTPProxy (const char *host, + int port, + const char *user, + const char *passwd, + int type); +XML_DEPRECATED +XMLPUBFUN int + xmlNanoFTPUpdateURL (void *ctx, + const char *URL); + +/* + * Rather internal commands. + */ +XML_DEPRECATED +XMLPUBFUN int + xmlNanoFTPGetResponse (void *ctx); +XML_DEPRECATED +XMLPUBFUN int + xmlNanoFTPCheckResponse (void *ctx); + +/* + * CD/DIR/GET handlers. 
+ */ +XML_DEPRECATED +XMLPUBFUN int + xmlNanoFTPCwd (void *ctx, + const char *directory); +XML_DEPRECATED +XMLPUBFUN int + xmlNanoFTPDele (void *ctx, + const char *file); + +XML_DEPRECATED +XMLPUBFUN SOCKET + xmlNanoFTPGetConnection (void *ctx); +XML_DEPRECATED +XMLPUBFUN int + xmlNanoFTPCloseConnection(void *ctx); +XML_DEPRECATED +XMLPUBFUN int + xmlNanoFTPList (void *ctx, + ftpListCallback callback, + void *userData, + const char *filename); +XML_DEPRECATED +XMLPUBFUN SOCKET + xmlNanoFTPGetSocket (void *ctx, + const char *filename); +XML_DEPRECATED +XMLPUBFUN int + xmlNanoFTPGet (void *ctx, + ftpDataCallback callback, + void *userData, + const char *filename); +XML_DEPRECATED +XMLPUBFUN int + xmlNanoFTPRead (void *ctx, + void *dest, + int len); + +#ifdef __cplusplus +} +#endif +#endif /* defined(LIBXML_FTP_ENABLED) || defined(LIBXML_LEGACY_ENABLED) */ +#endif /* __NANO_FTP_H__ */ diff --git a/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/include/libxml2/libxml/nanohttp.h b/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/include/libxml2/libxml/nanohttp.h new file mode 100644 index 000000000..c70d1c26b --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/include/libxml2/libxml/nanohttp.h @@ -0,0 +1,98 @@ +/* + * Summary: minimal HTTP implementation + * Description: minimal HTTP implementation allowing to fetch resources + * like external subset. + * + * Copy: See Copyright for the status of this software. + * + * Author: Daniel Veillard + */ + +#ifndef __NANO_HTTP_H__ +#define __NANO_HTTP_H__ + +#include + +#ifdef LIBXML_HTTP_ENABLED + +#ifdef __cplusplus +extern "C" { +#endif +XML_DEPRECATED +XMLPUBFUN void + xmlNanoHTTPInit (void); +XML_DEPRECATED +XMLPUBFUN void + xmlNanoHTTPCleanup (void); +XML_DEPRECATED +XMLPUBFUN void + xmlNanoHTTPScanProxy (const char *URL); +XML_DEPRECATED +XMLPUBFUN int + xmlNanoHTTPFetch (const char *URL, + const char *filename, + char **contentType); +XML_DEPRECATED +XMLPUBFUN void * + xmlNanoHTTPMethod (const char *URL, + const char *method, + const char *input, + char **contentType, + const char *headers, + int ilen); +XML_DEPRECATED +XMLPUBFUN void * + xmlNanoHTTPMethodRedir (const char *URL, + const char *method, + const char *input, + char **contentType, + char **redir, + const char *headers, + int ilen); +XML_DEPRECATED +XMLPUBFUN void * + xmlNanoHTTPOpen (const char *URL, + char **contentType); +XML_DEPRECATED +XMLPUBFUN void * + xmlNanoHTTPOpenRedir (const char *URL, + char **contentType, + char **redir); +XML_DEPRECATED +XMLPUBFUN int + xmlNanoHTTPReturnCode (void *ctx); +XML_DEPRECATED +XMLPUBFUN const char * + xmlNanoHTTPAuthHeader (void *ctx); +XML_DEPRECATED +XMLPUBFUN const char * + xmlNanoHTTPRedir (void *ctx); +XML_DEPRECATED +XMLPUBFUN int + xmlNanoHTTPContentLength( void * ctx ); +XML_DEPRECATED +XMLPUBFUN const char * + xmlNanoHTTPEncoding (void *ctx); +XML_DEPRECATED +XMLPUBFUN const char * + xmlNanoHTTPMimeType (void *ctx); +XML_DEPRECATED +XMLPUBFUN int + xmlNanoHTTPRead (void *ctx, + void *dest, + int len); +#ifdef LIBXML_OUTPUT_ENABLED +XML_DEPRECATED +XMLPUBFUN int + xmlNanoHTTPSave (void *ctxt, + const char *filename); +#endif /* LIBXML_OUTPUT_ENABLED */ +XML_DEPRECATED +XMLPUBFUN void + xmlNanoHTTPClose (void *ctx); +#ifdef __cplusplus +} +#endif + +#endif /* LIBXML_HTTP_ENABLED */ +#endif /* __NANO_HTTP_H__ */ diff --git a/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/include/libxml2/libxml/parser.h 
b/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/include/libxml2/libxml/parser.h new file mode 100644 index 000000000..78d29cada --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/include/libxml2/libxml/parser.h @@ -0,0 +1,1390 @@ +/* + * Summary: the core parser module + * Description: Interfaces, constants and types related to the XML parser + * + * Copy: See Copyright for the status of this software. + * + * Author: Daniel Veillard + */ + +#ifndef __XML_PARSER_H__ +#define __XML_PARSER_H__ + +/** DOC_DISABLE */ +#include +#define XML_TREE_INTERNALS +#include +#undef XML_TREE_INTERNALS +#include +#include +#include +#include +#include +#include +#include +#include +#include +/* for compatibility */ +#include +#include +/** DOC_ENABLE */ + +#ifdef __cplusplus +extern "C" { +#endif + +/** + * XML_DEFAULT_VERSION: + * + * The default version of XML used: 1.0 + */ +#define XML_DEFAULT_VERSION "1.0" + +/** + * xmlParserInput: + * + * An xmlParserInput is an input flow for the XML processor. + * Each entity parsed is associated an xmlParserInput (except the + * few predefined ones). This is the case both for internal entities + * - in which case the flow is already completely in memory - or + * external entities - in which case we use the buf structure for + * progressive reading and I18N conversions to the internal UTF-8 format. + */ + +/** + * xmlParserInputDeallocate: + * @str: the string to deallocate + * + * Callback for freeing some parser input allocations. + */ +typedef void (* xmlParserInputDeallocate)(xmlChar *str); + +struct _xmlParserInput { + /* Input buffer */ + xmlParserInputBufferPtr buf; /* UTF-8 encoded buffer */ + + const char *filename; /* The file analyzed, if any */ + const char *directory; /* unused */ + const xmlChar *base; /* Base of the array to parse */ + const xmlChar *cur; /* Current char being parsed */ + const xmlChar *end; /* end of the array to parse */ + int length; /* unused */ + int line; /* Current line */ + int col; /* Current column */ + unsigned long consumed; /* How many xmlChars already consumed */ + xmlParserInputDeallocate free; /* function to deallocate the base */ + const xmlChar *encoding; /* unused */ + const xmlChar *version; /* the version string for entity */ + int flags; /* Flags */ + int id; /* an unique identifier for the entity */ + unsigned long parentConsumed; /* unused */ + xmlEntityPtr entity; /* entity, if any */ +}; + +/** + * xmlParserNodeInfo: + * + * The parser can be asked to collect Node information, i.e. at what + * place in the file they were detected. + * NOTE: This is off by default and not very well tested. + */ +typedef struct _xmlParserNodeInfo xmlParserNodeInfo; +typedef xmlParserNodeInfo *xmlParserNodeInfoPtr; + +struct _xmlParserNodeInfo { + const struct _xmlNode* node; + /* Position & line # that text that created the node begins & ends on */ + unsigned long begin_pos; + unsigned long begin_line; + unsigned long end_pos; + unsigned long end_line; +}; + +typedef struct _xmlParserNodeInfoSeq xmlParserNodeInfoSeq; +typedef xmlParserNodeInfoSeq *xmlParserNodeInfoSeqPtr; +struct _xmlParserNodeInfoSeq { + unsigned long maximum; + unsigned long length; + xmlParserNodeInfo* buffer; +}; + +/** + * xmlParserInputState: + * + * The parser is now working also as a state based parser. + * The recursive one use the state info for entities processing. 
+ */ +typedef enum { + XML_PARSER_EOF = -1, /* nothing is to be parsed */ + XML_PARSER_START = 0, /* nothing has been parsed */ + XML_PARSER_MISC, /* Misc* before int subset */ + XML_PARSER_PI, /* Within a processing instruction */ + XML_PARSER_DTD, /* within some DTD content */ + XML_PARSER_PROLOG, /* Misc* after internal subset */ + XML_PARSER_COMMENT, /* within a comment */ + XML_PARSER_START_TAG, /* within a start tag */ + XML_PARSER_CONTENT, /* within the content */ + XML_PARSER_CDATA_SECTION, /* within a CDATA section */ + XML_PARSER_END_TAG, /* within a closing tag */ + XML_PARSER_ENTITY_DECL, /* within an entity declaration */ + XML_PARSER_ENTITY_VALUE, /* within an entity value in a decl */ + XML_PARSER_ATTRIBUTE_VALUE, /* within an attribute value */ + XML_PARSER_SYSTEM_LITERAL, /* within a SYSTEM value */ + XML_PARSER_EPILOG, /* the Misc* after the last end tag */ + XML_PARSER_IGNORE, /* within an IGNORED section */ + XML_PARSER_PUBLIC_LITERAL, /* within a PUBLIC value */ + XML_PARSER_XML_DECL /* before XML decl (but after BOM) */ +} xmlParserInputState; + +/** DOC_DISABLE */ +/* + * Internal bits in the 'loadsubset' context member + */ +#define XML_DETECT_IDS 2 +#define XML_COMPLETE_ATTRS 4 +#define XML_SKIP_IDS 8 +/** DOC_ENABLE */ + +/** + * xmlParserMode: + * + * A parser can operate in various modes + */ +typedef enum { + XML_PARSE_UNKNOWN = 0, + XML_PARSE_DOM = 1, + XML_PARSE_SAX = 2, + XML_PARSE_PUSH_DOM = 3, + XML_PARSE_PUSH_SAX = 4, + XML_PARSE_READER = 5 +} xmlParserMode; + +typedef struct _xmlStartTag xmlStartTag; +typedef struct _xmlParserNsData xmlParserNsData; +typedef struct _xmlAttrHashBucket xmlAttrHashBucket; + +/** + * xmlParserCtxt: + * + * The parser context. + * NOTE This doesn't completely define the parser state, the (current ?) + * design of the parser uses recursive function calls since this allow + * and easy mapping from the production rules of the specification + * to the actual code. The drawback is that the actual function call + * also reflect the parser state. However most of the parsing routines + * takes as the only argument the parser context pointer, so migrating + * to a state based parser for progressive parsing shouldn't be too hard. + */ +struct _xmlParserCtxt { + struct _xmlSAXHandler *sax; /* The SAX handler */ + void *userData; /* For SAX interface only, used by DOM build */ + xmlDocPtr myDoc; /* the document being built */ + int wellFormed; /* is the document well formed */ + int replaceEntities; /* shall we replace entities ? 
*/ + const xmlChar *version; /* the XML version string */ + const xmlChar *encoding; /* the declared encoding, if any */ + int standalone; /* standalone document */ + int html; /* an HTML(1) document + * 3 is HTML after + * 10 is HTML after + */ + + /* Input stream stack */ + xmlParserInputPtr input; /* Current input stream */ + int inputNr; /* Number of current input streams */ + int inputMax; /* Max number of input streams */ + xmlParserInputPtr *inputTab; /* stack of inputs */ + + /* Node analysis stack only used for DOM building */ + xmlNodePtr node; /* Current parsed Node */ + int nodeNr; /* Depth of the parsing stack */ + int nodeMax; /* Max depth of the parsing stack */ + xmlNodePtr *nodeTab; /* array of nodes */ + + int record_info; /* Whether node info should be kept */ + xmlParserNodeInfoSeq node_seq; /* info about each node parsed */ + + int errNo; /* error code */ + + int hasExternalSubset; /* reference and external subset */ + int hasPErefs; /* the internal subset has PE refs */ + int external; /* unused */ + + int valid; /* is the document valid */ + int validate; /* shall we try to validate ? */ + xmlValidCtxt vctxt; /* The validity context */ + + xmlParserInputState instate; /* push parser state */ + int token; /* unused */ + + char *directory; /* unused */ + + /* Node name stack */ + const xmlChar *name; /* Current parsed Node */ + int nameNr; /* Depth of the parsing stack */ + int nameMax; /* Max depth of the parsing stack */ + const xmlChar * *nameTab; /* array of nodes */ + + long nbChars; /* unused */ + long checkIndex; /* used by progressive parsing lookup */ + int keepBlanks; /* ugly but ... */ + int disableSAX; /* SAX callbacks are disabled */ + int inSubset; /* Parsing is in int 1/ext 2 subset */ + const xmlChar * intSubName; /* name of subset */ + xmlChar * extSubURI; /* URI of external subset */ + xmlChar * extSubSystem; /* SYSTEM ID of external subset */ + + /* xml:space values */ + int * space; /* Should the parser preserve spaces */ + int spaceNr; /* Depth of the parsing stack */ + int spaceMax; /* Max depth of the parsing stack */ + int * spaceTab; /* array of space infos */ + + int depth; /* to prevent entity substitution loops */ + xmlParserInputPtr entity; /* unused */ + int charset; /* unused */ + int nodelen; /* Those two fields are there to */ + int nodemem; /* Speed up large node parsing */ + int pedantic; /* signal pedantic warnings */ + void *_private; /* For user data, libxml won't touch it */ + + int loadsubset; /* should the external subset be loaded */ + int linenumbers; /* set line number in element content */ + void *catalogs; /* document's own catalog */ + int recovery; /* run in recovery mode */ + int progressive; /* unused */ + xmlDictPtr dict; /* dictionary for the parser */ + const xmlChar * *atts; /* array for the attributes callbacks */ + int maxatts; /* the size of the array */ + int docdict; /* unused */ + + /* + * pre-interned strings + */ + const xmlChar *str_xml; + const xmlChar *str_xmlns; + const xmlChar *str_xml_ns; + + /* + * Everything below is used only by the new SAX mode + */ + int sax2; /* operating in the new SAX mode */ + int nsNr; /* the number of inherited namespaces */ + int nsMax; /* the size of the arrays */ + const xmlChar * *nsTab; /* the array of prefix/namespace name */ + unsigned *attallocs; /* which attribute were allocated */ + xmlStartTag *pushTab; /* array of data for push */ + xmlHashTablePtr attsDefault; /* defaulted attributes if any */ + xmlHashTablePtr attsSpecial; /* non-CDATA attributes if any */ + 
int nsWellFormed; /* is the document XML Namespace okay */ + int options; /* Extra options */ + + /* + * Those fields are needed only for streaming parsing so far + */ + int dictNames; /* Use dictionary names for the tree */ + int freeElemsNr; /* number of freed element nodes */ + xmlNodePtr freeElems; /* List of freed element nodes */ + int freeAttrsNr; /* number of freed attributes nodes */ + xmlAttrPtr freeAttrs; /* List of freed attributes nodes */ + + /* + * the complete error information for the last error. + */ + xmlError lastError; + xmlParserMode parseMode; /* the parser mode */ + unsigned long nbentities; /* unused */ + unsigned long sizeentities; /* size of external entities */ + + /* for use by HTML non-recursive parser */ + xmlParserNodeInfo *nodeInfo; /* Current NodeInfo */ + int nodeInfoNr; /* Depth of the parsing stack */ + int nodeInfoMax; /* Max depth of the parsing stack */ + xmlParserNodeInfo *nodeInfoTab; /* array of nodeInfos */ + + int input_id; /* we need to label inputs */ + unsigned long sizeentcopy; /* volume of entity copy */ + + int endCheckState; /* quote state for push parser */ + unsigned short nbErrors; /* number of errors */ + unsigned short nbWarnings; /* number of warnings */ + unsigned maxAmpl; /* maximum amplification factor */ + + xmlParserNsData *nsdb; /* namespace database */ + unsigned attrHashMax; /* allocated size */ + xmlAttrHashBucket *attrHash; /* atttribute hash table */ + + xmlStructuredErrorFunc errorHandler; + void *errorCtxt; +}; + +/** + * xmlSAXLocator: + * + * A SAX Locator. + */ +struct _xmlSAXLocator { + const xmlChar *(*getPublicId)(void *ctx); + const xmlChar *(*getSystemId)(void *ctx); + int (*getLineNumber)(void *ctx); + int (*getColumnNumber)(void *ctx); +}; + +/** + * xmlSAXHandler: + * + * A SAX handler is bunch of callbacks called by the parser when processing + * of the input generate data or structure information. + */ + +/** + * resolveEntitySAXFunc: + * @ctx: the user data (XML parser context) + * @publicId: The public ID of the entity + * @systemId: The system ID of the entity + * + * Callback: + * The entity loader, to control the loading of external entities, + * the application can either: + * - override this resolveEntity() callback in the SAX block + * - or better use the xmlSetExternalEntityLoader() function to + * set up it's own entity resolution routine + * + * Returns the xmlParserInputPtr if inlined or NULL for DOM behaviour. + */ +typedef xmlParserInputPtr (*resolveEntitySAXFunc) (void *ctx, + const xmlChar *publicId, + const xmlChar *systemId); +/** + * internalSubsetSAXFunc: + * @ctx: the user data (XML parser context) + * @name: the root element name + * @ExternalID: the external ID + * @SystemID: the SYSTEM ID (e.g. filename or URL) + * + * Callback on internal subset declaration. + */ +typedef void (*internalSubsetSAXFunc) (void *ctx, + const xmlChar *name, + const xmlChar *ExternalID, + const xmlChar *SystemID); +/** + * externalSubsetSAXFunc: + * @ctx: the user data (XML parser context) + * @name: the root element name + * @ExternalID: the external ID + * @SystemID: the SYSTEM ID (e.g. filename or URL) + * + * Callback on external subset declaration. + */ +typedef void (*externalSubsetSAXFunc) (void *ctx, + const xmlChar *name, + const xmlChar *ExternalID, + const xmlChar *SystemID); +/** + * getEntitySAXFunc: + * @ctx: the user data (XML parser context) + * @name: The entity name + * + * Get an entity by name. + * + * Returns the xmlEntityPtr if found. 
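/*
 * Sketch of how the context fields above are normally inspected after a
 * parse. xmlNewParserCtxt(), xmlCtxtReadMemory() and xmlFreeParserCtxt()
 * are declared further down in parser.h (outside this hunk) and
 * xmlFreeDoc() in tree.h; they are assumed here. Illustration only, not
 * part of the vendored header.
 */
#include <stdio.h>
#include <string.h>
#include <libxml/parser.h>

static void ctxt_demo(const char *xml) {
    xmlParserCtxtPtr ctxt = xmlNewParserCtxt();
    if (ctxt == NULL)
        return;
    xmlDocPtr doc = xmlCtxtReadMemory(ctxt, xml, (int) strlen(xml),
                                      "inline.xml", NULL, 0);
    /* wellFormed and errNo summarise the outcome of the last parse */
    printf("well-formed=%d errNo=%d errors=%u warnings=%u\n",
           ctxt->wellFormed, ctxt->errNo,
           (unsigned) ctxt->nbErrors, (unsigned) ctxt->nbWarnings);
    if (doc != NULL)
        xmlFreeDoc(doc);
    xmlFreeParserCtxt(ctxt);
}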
+ */ +typedef xmlEntityPtr (*getEntitySAXFunc) (void *ctx, + const xmlChar *name); +/** + * getParameterEntitySAXFunc: + * @ctx: the user data (XML parser context) + * @name: The entity name + * + * Get a parameter entity by name. + * + * Returns the xmlEntityPtr if found. + */ +typedef xmlEntityPtr (*getParameterEntitySAXFunc) (void *ctx, + const xmlChar *name); +/** + * entityDeclSAXFunc: + * @ctx: the user data (XML parser context) + * @name: the entity name + * @type: the entity type + * @publicId: The public ID of the entity + * @systemId: The system ID of the entity + * @content: the entity value (without processing). + * + * An entity definition has been parsed. + */ +typedef void (*entityDeclSAXFunc) (void *ctx, + const xmlChar *name, + int type, + const xmlChar *publicId, + const xmlChar *systemId, + xmlChar *content); +/** + * notationDeclSAXFunc: + * @ctx: the user data (XML parser context) + * @name: The name of the notation + * @publicId: The public ID of the entity + * @systemId: The system ID of the entity + * + * What to do when a notation declaration has been parsed. + */ +typedef void (*notationDeclSAXFunc)(void *ctx, + const xmlChar *name, + const xmlChar *publicId, + const xmlChar *systemId); +/** + * attributeDeclSAXFunc: + * @ctx: the user data (XML parser context) + * @elem: the name of the element + * @fullname: the attribute name + * @type: the attribute type + * @def: the type of default value + * @defaultValue: the attribute default value + * @tree: the tree of enumerated value set + * + * An attribute definition has been parsed. + */ +typedef void (*attributeDeclSAXFunc)(void *ctx, + const xmlChar *elem, + const xmlChar *fullname, + int type, + int def, + const xmlChar *defaultValue, + xmlEnumerationPtr tree); +/** + * elementDeclSAXFunc: + * @ctx: the user data (XML parser context) + * @name: the element name + * @type: the element type + * @content: the element value tree + * + * An element definition has been parsed. + */ +typedef void (*elementDeclSAXFunc)(void *ctx, + const xmlChar *name, + int type, + xmlElementContentPtr content); +/** + * unparsedEntityDeclSAXFunc: + * @ctx: the user data (XML parser context) + * @name: The name of the entity + * @publicId: The public ID of the entity + * @systemId: The system ID of the entity + * @notationName: the name of the notation + * + * What to do when an unparsed entity declaration is parsed. + */ +typedef void (*unparsedEntityDeclSAXFunc)(void *ctx, + const xmlChar *name, + const xmlChar *publicId, + const xmlChar *systemId, + const xmlChar *notationName); +/** + * setDocumentLocatorSAXFunc: + * @ctx: the user data (XML parser context) + * @loc: A SAX Locator + * + * Receive the document locator at startup, actually xmlDefaultSAXLocator. + * Everything is available on the context, so this is useless in our case. + */ +typedef void (*setDocumentLocatorSAXFunc) (void *ctx, + xmlSAXLocatorPtr loc); +/** + * startDocumentSAXFunc: + * @ctx: the user data (XML parser context) + * + * Called when the document start being processed. + */ +typedef void (*startDocumentSAXFunc) (void *ctx); +/** + * endDocumentSAXFunc: + * @ctx: the user data (XML parser context) + * + * Called when the document end has been detected. + */ +typedef void (*endDocumentSAXFunc) (void *ctx); +/** + * startElementSAXFunc: + * @ctx: the user data (XML parser context) + * @name: The element name, including namespace prefix + * @atts: An array of name/value attributes pairs, NULL terminated + * + * Called when an opening tag has been processed. 
+ */ +typedef void (*startElementSAXFunc) (void *ctx, + const xmlChar *name, + const xmlChar **atts); +/** + * endElementSAXFunc: + * @ctx: the user data (XML parser context) + * @name: The element name + * + * Called when the end of an element has been detected. + */ +typedef void (*endElementSAXFunc) (void *ctx, + const xmlChar *name); +/** + * attributeSAXFunc: + * @ctx: the user data (XML parser context) + * @name: The attribute name, including namespace prefix + * @value: The attribute value + * + * Handle an attribute that has been read by the parser. + * The default handling is to convert the attribute into an + * DOM subtree and past it in a new xmlAttr element added to + * the element. + */ +typedef void (*attributeSAXFunc) (void *ctx, + const xmlChar *name, + const xmlChar *value); +/** + * referenceSAXFunc: + * @ctx: the user data (XML parser context) + * @name: The entity name + * + * Called when an entity reference is detected. + */ +typedef void (*referenceSAXFunc) (void *ctx, + const xmlChar *name); +/** + * charactersSAXFunc: + * @ctx: the user data (XML parser context) + * @ch: a xmlChar string + * @len: the number of xmlChar + * + * Receiving some chars from the parser. + */ +typedef void (*charactersSAXFunc) (void *ctx, + const xmlChar *ch, + int len); +/** + * ignorableWhitespaceSAXFunc: + * @ctx: the user data (XML parser context) + * @ch: a xmlChar string + * @len: the number of xmlChar + * + * Receiving some ignorable whitespaces from the parser. + * UNUSED: by default the DOM building will use characters. + */ +typedef void (*ignorableWhitespaceSAXFunc) (void *ctx, + const xmlChar *ch, + int len); +/** + * processingInstructionSAXFunc: + * @ctx: the user data (XML parser context) + * @target: the target name + * @data: the PI data's + * + * A processing instruction has been parsed. + */ +typedef void (*processingInstructionSAXFunc) (void *ctx, + const xmlChar *target, + const xmlChar *data); +/** + * commentSAXFunc: + * @ctx: the user data (XML parser context) + * @value: the comment content + * + * A comment has been parsed. + */ +typedef void (*commentSAXFunc) (void *ctx, + const xmlChar *value); +/** + * cdataBlockSAXFunc: + * @ctx: the user data (XML parser context) + * @value: The pcdata content + * @len: the block length + * + * Called when a pcdata block has been parsed. + */ +typedef void (*cdataBlockSAXFunc) ( + void *ctx, + const xmlChar *value, + int len); +/** + * warningSAXFunc: + * @ctx: an XML parser context + * @msg: the message to display/transmit + * @...: extra parameters for the message display + * + * Display and format a warning messages, callback. + */ +typedef void (*warningSAXFunc) (void *ctx, + const char *msg, ...) LIBXML_ATTR_FORMAT(2,3); +/** + * errorSAXFunc: + * @ctx: an XML parser context + * @msg: the message to display/transmit + * @...: extra parameters for the message display + * + * Display and format an error messages, callback. + */ +typedef void (*errorSAXFunc) (void *ctx, + const char *msg, ...) LIBXML_ATTR_FORMAT(2,3); +/** + * fatalErrorSAXFunc: + * @ctx: an XML parser context + * @msg: the message to display/transmit + * @...: extra parameters for the message display + * + * Display and format fatal error messages, callback. + * Note: so far fatalError() SAX callbacks are not used, error() + * get all the callbacks for errors. + */ +typedef void (*fatalErrorSAXFunc) (void *ctx, + const char *msg, ...) 
LIBXML_ATTR_FORMAT(2,3); +/** + * isStandaloneSAXFunc: + * @ctx: the user data (XML parser context) + * + * Is this document tagged standalone? + * + * Returns 1 if true + */ +typedef int (*isStandaloneSAXFunc) (void *ctx); +/** + * hasInternalSubsetSAXFunc: + * @ctx: the user data (XML parser context) + * + * Does this document has an internal subset. + * + * Returns 1 if true + */ +typedef int (*hasInternalSubsetSAXFunc) (void *ctx); + +/** + * hasExternalSubsetSAXFunc: + * @ctx: the user data (XML parser context) + * + * Does this document has an external subset? + * + * Returns 1 if true + */ +typedef int (*hasExternalSubsetSAXFunc) (void *ctx); + +/************************************************************************ + * * + * The SAX version 2 API extensions * + * * + ************************************************************************/ +/** + * XML_SAX2_MAGIC: + * + * Special constant found in SAX2 blocks initialized fields + */ +#define XML_SAX2_MAGIC 0xDEEDBEAF + +/** + * startElementNsSAX2Func: + * @ctx: the user data (XML parser context) + * @localname: the local name of the element + * @prefix: the element namespace prefix if available + * @URI: the element namespace name if available + * @nb_namespaces: number of namespace definitions on that node + * @namespaces: pointer to the array of prefix/URI pairs namespace definitions + * @nb_attributes: the number of attributes on that node + * @nb_defaulted: the number of defaulted attributes. The defaulted + * ones are at the end of the array + * @attributes: pointer to the array of (localname/prefix/URI/value/end) + * attribute values. + * + * SAX2 callback when an element start has been detected by the parser. + * It provides the namespace information for the element, as well as + * the new namespace declarations on the element. + */ + +typedef void (*startElementNsSAX2Func) (void *ctx, + const xmlChar *localname, + const xmlChar *prefix, + const xmlChar *URI, + int nb_namespaces, + const xmlChar **namespaces, + int nb_attributes, + int nb_defaulted, + const xmlChar **attributes); + +/** + * endElementNsSAX2Func: + * @ctx: the user data (XML parser context) + * @localname: the local name of the element + * @prefix: the element namespace prefix if available + * @URI: the element namespace name if available + * + * SAX2 callback when an element end has been detected by the parser. + * It provides the namespace information for the element. + */ + +typedef void (*endElementNsSAX2Func) (void *ctx, + const xmlChar *localname, + const xmlChar *prefix, + const xmlChar *URI); + + +struct _xmlSAXHandler { + internalSubsetSAXFunc internalSubset; + isStandaloneSAXFunc isStandalone; + hasInternalSubsetSAXFunc hasInternalSubset; + hasExternalSubsetSAXFunc hasExternalSubset; + resolveEntitySAXFunc resolveEntity; + getEntitySAXFunc getEntity; + entityDeclSAXFunc entityDecl; + notationDeclSAXFunc notationDecl; + attributeDeclSAXFunc attributeDecl; + elementDeclSAXFunc elementDecl; + unparsedEntityDeclSAXFunc unparsedEntityDecl; + setDocumentLocatorSAXFunc setDocumentLocator; + startDocumentSAXFunc startDocument; + endDocumentSAXFunc endDocument; + /* + * `startElement` and `endElement` are only used by the legacy SAX1 + * interface and should not be used in new software. If you really + * have to enable SAX1, the preferred way is set the `initialized` + * member to 1 instead of XML_SAX2_MAGIC. + * + * For backward compatibility, it's also possible to set the + * `startElementNs` and `endElementNs` handlers to NULL. 
+ * + * You can also set the XML_PARSE_SAX1 parser option, but versions + * older than 2.12.0 will probably crash if this option is provided + * together with custom SAX callbacks. + */ + startElementSAXFunc startElement; + endElementSAXFunc endElement; + referenceSAXFunc reference; + charactersSAXFunc characters; + ignorableWhitespaceSAXFunc ignorableWhitespace; + processingInstructionSAXFunc processingInstruction; + commentSAXFunc comment; + warningSAXFunc warning; + errorSAXFunc error; + fatalErrorSAXFunc fatalError; /* unused error() get all the errors */ + getParameterEntitySAXFunc getParameterEntity; + cdataBlockSAXFunc cdataBlock; + externalSubsetSAXFunc externalSubset; + /* + * `initialized` should always be set to XML_SAX2_MAGIC to enable the + * modern SAX2 interface. + */ + unsigned int initialized; + /* + * The following members are only used by the SAX2 interface. + */ + void *_private; + startElementNsSAX2Func startElementNs; + endElementNsSAX2Func endElementNs; + xmlStructuredErrorFunc serror; +}; + +/* + * SAX Version 1 + */ +typedef struct _xmlSAXHandlerV1 xmlSAXHandlerV1; +typedef xmlSAXHandlerV1 *xmlSAXHandlerV1Ptr; +struct _xmlSAXHandlerV1 { + internalSubsetSAXFunc internalSubset; + isStandaloneSAXFunc isStandalone; + hasInternalSubsetSAXFunc hasInternalSubset; + hasExternalSubsetSAXFunc hasExternalSubset; + resolveEntitySAXFunc resolveEntity; + getEntitySAXFunc getEntity; + entityDeclSAXFunc entityDecl; + notationDeclSAXFunc notationDecl; + attributeDeclSAXFunc attributeDecl; + elementDeclSAXFunc elementDecl; + unparsedEntityDeclSAXFunc unparsedEntityDecl; + setDocumentLocatorSAXFunc setDocumentLocator; + startDocumentSAXFunc startDocument; + endDocumentSAXFunc endDocument; + startElementSAXFunc startElement; + endElementSAXFunc endElement; + referenceSAXFunc reference; + charactersSAXFunc characters; + ignorableWhitespaceSAXFunc ignorableWhitespace; + processingInstructionSAXFunc processingInstruction; + commentSAXFunc comment; + warningSAXFunc warning; + errorSAXFunc error; + fatalErrorSAXFunc fatalError; /* unused error() get all the errors */ + getParameterEntitySAXFunc getParameterEntity; + cdataBlockSAXFunc cdataBlock; + externalSubsetSAXFunc externalSubset; + unsigned int initialized; +}; + + +/** + * xmlExternalEntityLoader: + * @URL: The System ID of the resource requested + * @ID: The Public ID of the resource requested + * @context: the XML parser context + * + * External entity loaders types. + * + * Returns the entity input parser. 
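+ *
+ * Example (illustrative sketch, not normative): a replacement loader is
+ * typically installed from initialization code with
+ * xmlSetExternalEntityLoader() and may delegate to the previously
+ * registered loader obtained from xmlGetExternalEntityLoader();
+ * returning NULL refuses the resource. The names "prevLoader" and
+ * "myLoader" are placeholders, and strncmp() requires <string.h>.
+ *
+ *     static xmlExternalEntityLoader prevLoader;
+ *
+ *     static xmlParserInputPtr
+ *     myLoader(const char *URL, const char *ID, xmlParserCtxtPtr ctxt) {
+ *         if ((URL != NULL) && (strncmp(URL, "http://", 7) == 0))
+ *             return(NULL);
+ *         return(prevLoader(URL, ID, ctxt));
+ *     }
+ *
+ *     prevLoader = xmlGetExternalEntityLoader();
+ *     xmlSetExternalEntityLoader(myLoader);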
+ */ +typedef xmlParserInputPtr (*xmlExternalEntityLoader) (const char *URL, + const char *ID, + xmlParserCtxtPtr context); + +/* + * Variables + */ + +XMLPUBVAR const char *const xmlParserVersion; +XML_DEPRECATED +XMLPUBVAR const int oldXMLWDcompatibility; +XML_DEPRECATED +XMLPUBVAR const int xmlParserDebugEntities; +XML_DEPRECATED +XMLPUBVAR const xmlSAXLocator xmlDefaultSAXLocator; +#ifdef LIBXML_SAX1_ENABLED +XML_DEPRECATED +XMLPUBVAR const xmlSAXHandlerV1 xmlDefaultSAXHandler; +#endif + +#ifdef LIBXML_THREAD_ENABLED +/* backward compatibility */ +XMLPUBFUN const char *const *__xmlParserVersion(void); +XML_DEPRECATED +XMLPUBFUN const int *__oldXMLWDcompatibility(void); +XML_DEPRECATED +XMLPUBFUN const int *__xmlParserDebugEntities(void); +XML_DEPRECATED +XMLPUBFUN const xmlSAXLocator *__xmlDefaultSAXLocator(void); +#ifdef LIBXML_SAX1_ENABLED +XML_DEPRECATED +XMLPUBFUN const xmlSAXHandlerV1 *__xmlDefaultSAXHandler(void); +#endif +#endif + +/** DOC_DISABLE */ +#define XML_GLOBALS_PARSER_CORE \ + XML_OP(xmlDoValidityCheckingDefaultValue, int, XML_DEPRECATED) \ + XML_OP(xmlGetWarningsDefaultValue, int, XML_DEPRECATED) \ + XML_OP(xmlKeepBlanksDefaultValue, int, XML_DEPRECATED) \ + XML_OP(xmlLineNumbersDefaultValue, int, XML_DEPRECATED) \ + XML_OP(xmlLoadExtDtdDefaultValue, int, XML_DEPRECATED) \ + XML_OP(xmlPedanticParserDefaultValue, int, XML_DEPRECATED) \ + XML_OP(xmlSubstituteEntitiesDefaultValue, int, XML_DEPRECATED) + +#ifdef LIBXML_OUTPUT_ENABLED + #define XML_GLOBALS_PARSER_OUTPUT \ + XML_OP(xmlIndentTreeOutput, int, XML_NO_ATTR) \ + XML_OP(xmlTreeIndentString, const char *, XML_NO_ATTR) \ + XML_OP(xmlSaveNoEmptyTags, int, XML_NO_ATTR) +#else + #define XML_GLOBALS_PARSER_OUTPUT +#endif + +#define XML_GLOBALS_PARSER \ + XML_GLOBALS_PARSER_CORE \ + XML_GLOBALS_PARSER_OUTPUT + +#define XML_OP XML_DECLARE_GLOBAL +XML_GLOBALS_PARSER +#undef XML_OP + +#if defined(LIBXML_THREAD_ENABLED) && !defined(XML_GLOBALS_NO_REDEFINITION) + #define xmlDoValidityCheckingDefaultValue \ + XML_GLOBAL_MACRO(xmlDoValidityCheckingDefaultValue) + #define xmlGetWarningsDefaultValue \ + XML_GLOBAL_MACRO(xmlGetWarningsDefaultValue) + #define xmlKeepBlanksDefaultValue XML_GLOBAL_MACRO(xmlKeepBlanksDefaultValue) + #define xmlLineNumbersDefaultValue \ + XML_GLOBAL_MACRO(xmlLineNumbersDefaultValue) + #define xmlLoadExtDtdDefaultValue XML_GLOBAL_MACRO(xmlLoadExtDtdDefaultValue) + #define xmlPedanticParserDefaultValue \ + XML_GLOBAL_MACRO(xmlPedanticParserDefaultValue) + #define xmlSubstituteEntitiesDefaultValue \ + XML_GLOBAL_MACRO(xmlSubstituteEntitiesDefaultValue) + #ifdef LIBXML_OUTPUT_ENABLED + #define xmlIndentTreeOutput XML_GLOBAL_MACRO(xmlIndentTreeOutput) + #define xmlTreeIndentString XML_GLOBAL_MACRO(xmlTreeIndentString) + #define xmlSaveNoEmptyTags XML_GLOBAL_MACRO(xmlSaveNoEmptyTags) + #endif +#endif +/** DOC_ENABLE */ + +/* + * Init/Cleanup + */ +XMLPUBFUN void + xmlInitParser (void); +XMLPUBFUN void + xmlCleanupParser (void); +XML_DEPRECATED +XMLPUBFUN void + xmlInitGlobals (void); +XML_DEPRECATED +XMLPUBFUN void + xmlCleanupGlobals (void); + +/* + * Input functions + */ +XML_DEPRECATED +XMLPUBFUN int + xmlParserInputRead (xmlParserInputPtr in, + int len); +XML_DEPRECATED +XMLPUBFUN int + xmlParserInputGrow (xmlParserInputPtr in, + int len); + +/* + * Basic parsing Interfaces + */ +#ifdef LIBXML_SAX1_ENABLED +XMLPUBFUN xmlDocPtr + xmlParseDoc (const xmlChar *cur); +XMLPUBFUN xmlDocPtr + xmlParseFile (const char *filename); +XMLPUBFUN xmlDocPtr + xmlParseMemory (const char *buffer, + int size); +#endif /* 
LIBXML_SAX1_ENABLED */ +XML_DEPRECATED XMLPUBFUN int + xmlSubstituteEntitiesDefault(int val); +XML_DEPRECATED XMLPUBFUN int + xmlThrDefSubstituteEntitiesDefaultValue(int v); +XMLPUBFUN int + xmlKeepBlanksDefault (int val); +XML_DEPRECATED XMLPUBFUN int + xmlThrDefKeepBlanksDefaultValue(int v); +XMLPUBFUN void + xmlStopParser (xmlParserCtxtPtr ctxt); +XML_DEPRECATED XMLPUBFUN int + xmlPedanticParserDefault(int val); +XML_DEPRECATED XMLPUBFUN int + xmlThrDefPedanticParserDefaultValue(int v); +XML_DEPRECATED XMLPUBFUN int + xmlLineNumbersDefault (int val); +XML_DEPRECATED XMLPUBFUN int + xmlThrDefLineNumbersDefaultValue(int v); +XML_DEPRECATED XMLPUBFUN int + xmlThrDefDoValidityCheckingDefaultValue(int v); +XML_DEPRECATED XMLPUBFUN int + xmlThrDefGetWarningsDefaultValue(int v); +XML_DEPRECATED XMLPUBFUN int + xmlThrDefLoadExtDtdDefaultValue(int v); +XML_DEPRECATED XMLPUBFUN int + xmlThrDefParserDebugEntities(int v); + +#ifdef LIBXML_SAX1_ENABLED +/* + * Recovery mode + */ +XML_DEPRECATED +XMLPUBFUN xmlDocPtr + xmlRecoverDoc (const xmlChar *cur); +XML_DEPRECATED +XMLPUBFUN xmlDocPtr + xmlRecoverMemory (const char *buffer, + int size); +XML_DEPRECATED +XMLPUBFUN xmlDocPtr + xmlRecoverFile (const char *filename); +#endif /* LIBXML_SAX1_ENABLED */ + +/* + * Less common routines and SAX interfaces + */ +XMLPUBFUN int + xmlParseDocument (xmlParserCtxtPtr ctxt); +XMLPUBFUN int + xmlParseExtParsedEnt (xmlParserCtxtPtr ctxt); +#ifdef LIBXML_SAX1_ENABLED +XML_DEPRECATED +XMLPUBFUN int + xmlSAXUserParseFile (xmlSAXHandlerPtr sax, + void *user_data, + const char *filename); +XML_DEPRECATED +XMLPUBFUN int + xmlSAXUserParseMemory (xmlSAXHandlerPtr sax, + void *user_data, + const char *buffer, + int size); +XML_DEPRECATED +XMLPUBFUN xmlDocPtr + xmlSAXParseDoc (xmlSAXHandlerPtr sax, + const xmlChar *cur, + int recovery); +XML_DEPRECATED +XMLPUBFUN xmlDocPtr + xmlSAXParseMemory (xmlSAXHandlerPtr sax, + const char *buffer, + int size, + int recovery); +XML_DEPRECATED +XMLPUBFUN xmlDocPtr + xmlSAXParseMemoryWithData (xmlSAXHandlerPtr sax, + const char *buffer, + int size, + int recovery, + void *data); +XML_DEPRECATED +XMLPUBFUN xmlDocPtr + xmlSAXParseFile (xmlSAXHandlerPtr sax, + const char *filename, + int recovery); +XML_DEPRECATED +XMLPUBFUN xmlDocPtr + xmlSAXParseFileWithData (xmlSAXHandlerPtr sax, + const char *filename, + int recovery, + void *data); +XML_DEPRECATED +XMLPUBFUN xmlDocPtr + xmlSAXParseEntity (xmlSAXHandlerPtr sax, + const char *filename); +XML_DEPRECATED +XMLPUBFUN xmlDocPtr + xmlParseEntity (const char *filename); +#endif /* LIBXML_SAX1_ENABLED */ + +#ifdef LIBXML_VALID_ENABLED +XML_DEPRECATED +XMLPUBFUN xmlDtdPtr + xmlSAXParseDTD (xmlSAXHandlerPtr sax, + const xmlChar *ExternalID, + const xmlChar *SystemID); +XMLPUBFUN xmlDtdPtr + xmlParseDTD (const xmlChar *ExternalID, + const xmlChar *SystemID); +XMLPUBFUN xmlDtdPtr + xmlIOParseDTD (xmlSAXHandlerPtr sax, + xmlParserInputBufferPtr input, + xmlCharEncoding enc); +#endif /* LIBXML_VALID_ENABLE */ +#ifdef LIBXML_SAX1_ENABLED +XMLPUBFUN int + xmlParseBalancedChunkMemory(xmlDocPtr doc, + xmlSAXHandlerPtr sax, + void *user_data, + int depth, + const xmlChar *string, + xmlNodePtr *lst); +#endif /* LIBXML_SAX1_ENABLED */ +XMLPUBFUN xmlParserErrors + xmlParseInNodeContext (xmlNodePtr node, + const char *data, + int datalen, + int options, + xmlNodePtr *lst); +#ifdef LIBXML_SAX1_ENABLED +XMLPUBFUN int + xmlParseBalancedChunkMemoryRecover(xmlDocPtr doc, + xmlSAXHandlerPtr sax, + void *user_data, + int depth, + const xmlChar *string, + xmlNodePtr 
*lst, + int recover); +XML_DEPRECATED +XMLPUBFUN int + xmlParseExternalEntity (xmlDocPtr doc, + xmlSAXHandlerPtr sax, + void *user_data, + int depth, + const xmlChar *URL, + const xmlChar *ID, + xmlNodePtr *lst); +#endif /* LIBXML_SAX1_ENABLED */ +XMLPUBFUN int + xmlParseCtxtExternalEntity(xmlParserCtxtPtr ctx, + const xmlChar *URL, + const xmlChar *ID, + xmlNodePtr *lst); + +/* + * Parser contexts handling. + */ +XMLPUBFUN xmlParserCtxtPtr + xmlNewParserCtxt (void); +XMLPUBFUN xmlParserCtxtPtr + xmlNewSAXParserCtxt (const xmlSAXHandler *sax, + void *userData); +XMLPUBFUN int + xmlInitParserCtxt (xmlParserCtxtPtr ctxt); +XMLPUBFUN void + xmlClearParserCtxt (xmlParserCtxtPtr ctxt); +XMLPUBFUN void + xmlFreeParserCtxt (xmlParserCtxtPtr ctxt); +#ifdef LIBXML_SAX1_ENABLED +XML_DEPRECATED +XMLPUBFUN void + xmlSetupParserForBuffer (xmlParserCtxtPtr ctxt, + const xmlChar* buffer, + const char *filename); +#endif /* LIBXML_SAX1_ENABLED */ +XMLPUBFUN xmlParserCtxtPtr + xmlCreateDocParserCtxt (const xmlChar *cur); + +#ifdef LIBXML_LEGACY_ENABLED +/* + * Reading/setting optional parsing features. + */ +XML_DEPRECATED +XMLPUBFUN int + xmlGetFeaturesList (int *len, + const char **result); +XML_DEPRECATED +XMLPUBFUN int + xmlGetFeature (xmlParserCtxtPtr ctxt, + const char *name, + void *result); +XML_DEPRECATED +XMLPUBFUN int + xmlSetFeature (xmlParserCtxtPtr ctxt, + const char *name, + void *value); +#endif /* LIBXML_LEGACY_ENABLED */ + +#ifdef LIBXML_PUSH_ENABLED +/* + * Interfaces for the Push mode. + */ +XMLPUBFUN xmlParserCtxtPtr + xmlCreatePushParserCtxt(xmlSAXHandlerPtr sax, + void *user_data, + const char *chunk, + int size, + const char *filename); +XMLPUBFUN int + xmlParseChunk (xmlParserCtxtPtr ctxt, + const char *chunk, + int size, + int terminate); +#endif /* LIBXML_PUSH_ENABLED */ + +/* + * Special I/O mode. + */ + +XMLPUBFUN xmlParserCtxtPtr + xmlCreateIOParserCtxt (xmlSAXHandlerPtr sax, + void *user_data, + xmlInputReadCallback ioread, + xmlInputCloseCallback ioclose, + void *ioctx, + xmlCharEncoding enc); + +XMLPUBFUN xmlParserInputPtr + xmlNewIOInputStream (xmlParserCtxtPtr ctxt, + xmlParserInputBufferPtr input, + xmlCharEncoding enc); + +/* + * Node infos. + */ +XMLPUBFUN const xmlParserNodeInfo* + xmlParserFindNodeInfo (xmlParserCtxtPtr ctxt, + xmlNodePtr node); +XMLPUBFUN void + xmlInitNodeInfoSeq (xmlParserNodeInfoSeqPtr seq); +XMLPUBFUN void + xmlClearNodeInfoSeq (xmlParserNodeInfoSeqPtr seq); +XMLPUBFUN unsigned long + xmlParserFindNodeInfoIndex(xmlParserNodeInfoSeqPtr seq, + xmlNodePtr node); +XMLPUBFUN void + xmlParserAddNodeInfo (xmlParserCtxtPtr ctxt, + xmlParserNodeInfoPtr info); + +/* + * External entities handling actually implemented in xmlIO. + */ + +XMLPUBFUN void + xmlSetExternalEntityLoader(xmlExternalEntityLoader f); +XMLPUBFUN xmlExternalEntityLoader + xmlGetExternalEntityLoader(void); +XMLPUBFUN xmlParserInputPtr + xmlLoadExternalEntity (const char *URL, + const char *ID, + xmlParserCtxtPtr ctxt); + +/* + * Index lookup, actually implemented in the encoding module + */ +XMLPUBFUN long + xmlByteConsumed (xmlParserCtxtPtr ctxt); + +/* + * New set of simpler/more flexible APIs + */ +/** + * xmlParserOption: + * + * This is the set of XML parser options that can be passed down + * to the xmlReadDoc() and similar calls. 
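+ *
+ * The values are bit flags and are combined with bitwise OR, for
+ * example (illustrative sketch; "data.xml" is a placeholder and
+ * xmlFreeDoc() is declared in tree.h):
+ *
+ *     xmlDocPtr doc = xmlReadFile("data.xml", NULL,
+ *                                 XML_PARSE_NOENT | XML_PARSE_NONET |
+ *                                 XML_PARSE_NOBLANKS);
+ *     if (doc != NULL) {
+ *         ... work with the document ...
+ *         xmlFreeDoc(doc);
+ *     }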
+ */ +typedef enum { + XML_PARSE_RECOVER = 1<<0, /* recover on errors */ + XML_PARSE_NOENT = 1<<1, /* substitute entities */ + XML_PARSE_DTDLOAD = 1<<2, /* load the external subset */ + XML_PARSE_DTDATTR = 1<<3, /* default DTD attributes */ + XML_PARSE_DTDVALID = 1<<4, /* validate with the DTD */ + XML_PARSE_NOERROR = 1<<5, /* suppress error reports */ + XML_PARSE_NOWARNING = 1<<6, /* suppress warning reports */ + XML_PARSE_PEDANTIC = 1<<7, /* pedantic error reporting */ + XML_PARSE_NOBLANKS = 1<<8, /* remove blank nodes */ + XML_PARSE_SAX1 = 1<<9, /* use the SAX1 interface internally */ + XML_PARSE_XINCLUDE = 1<<10,/* Implement XInclude substitution */ + XML_PARSE_NONET = 1<<11,/* Forbid network access */ + XML_PARSE_NODICT = 1<<12,/* Do not reuse the context dictionary */ + XML_PARSE_NSCLEAN = 1<<13,/* remove redundant namespaces declarations */ + XML_PARSE_NOCDATA = 1<<14,/* merge CDATA as text nodes */ + XML_PARSE_NOXINCNODE= 1<<15,/* do not generate XINCLUDE START/END nodes */ + XML_PARSE_COMPACT = 1<<16,/* compact small text nodes; no modification of + the tree allowed afterwards (will possibly + crash if you try to modify the tree) */ + XML_PARSE_OLD10 = 1<<17,/* parse using XML-1.0 before update 5 */ + XML_PARSE_NOBASEFIX = 1<<18,/* do not fixup XINCLUDE xml:base uris */ + XML_PARSE_HUGE = 1<<19,/* relax any hardcoded limit from the parser */ + XML_PARSE_OLDSAX = 1<<20,/* parse using SAX2 interface before 2.7.0 */ + XML_PARSE_IGNORE_ENC= 1<<21,/* ignore internal document encoding hint */ + XML_PARSE_BIG_LINES = 1<<22,/* Store big lines numbers in text PSVI field */ + XML_PARSE_NO_XXE = 1<<23 /* disable loading of external content */ +} xmlParserOption; + +XMLPUBFUN void + xmlCtxtReset (xmlParserCtxtPtr ctxt); +XMLPUBFUN int + xmlCtxtResetPush (xmlParserCtxtPtr ctxt, + const char *chunk, + int size, + const char *filename, + const char *encoding); +XMLPUBFUN int + xmlCtxtSetOptions (xmlParserCtxtPtr ctxt, + int options); +XMLPUBFUN int + xmlCtxtUseOptions (xmlParserCtxtPtr ctxt, + int options); +XMLPUBFUN void + xmlCtxtSetErrorHandler (xmlParserCtxtPtr ctxt, + xmlStructuredErrorFunc handler, + void *data); +XMLPUBFUN void + xmlCtxtSetMaxAmplification(xmlParserCtxtPtr ctxt, + unsigned maxAmpl); +XMLPUBFUN xmlDocPtr + xmlReadDoc (const xmlChar *cur, + const char *URL, + const char *encoding, + int options); +XMLPUBFUN xmlDocPtr + xmlReadFile (const char *URL, + const char *encoding, + int options); +XMLPUBFUN xmlDocPtr + xmlReadMemory (const char *buffer, + int size, + const char *URL, + const char *encoding, + int options); +XMLPUBFUN xmlDocPtr + xmlReadFd (int fd, + const char *URL, + const char *encoding, + int options); +XMLPUBFUN xmlDocPtr + xmlReadIO (xmlInputReadCallback ioread, + xmlInputCloseCallback ioclose, + void *ioctx, + const char *URL, + const char *encoding, + int options); +XMLPUBFUN xmlDocPtr + xmlCtxtParseDocument (xmlParserCtxtPtr ctxt, + xmlParserInputPtr input); +XMLPUBFUN xmlDocPtr + xmlCtxtReadDoc (xmlParserCtxtPtr ctxt, + const xmlChar *cur, + const char *URL, + const char *encoding, + int options); +XMLPUBFUN xmlDocPtr + xmlCtxtReadFile (xmlParserCtxtPtr ctxt, + const char *filename, + const char *encoding, + int options); +XMLPUBFUN xmlDocPtr + xmlCtxtReadMemory (xmlParserCtxtPtr ctxt, + const char *buffer, + int size, + const char *URL, + const char *encoding, + int options); +XMLPUBFUN xmlDocPtr + xmlCtxtReadFd (xmlParserCtxtPtr ctxt, + int fd, + const char *URL, + const char *encoding, + int options); +XMLPUBFUN xmlDocPtr + xmlCtxtReadIO 
(xmlParserCtxtPtr ctxt, + xmlInputReadCallback ioread, + xmlInputCloseCallback ioclose, + void *ioctx, + const char *URL, + const char *encoding, + int options); + +/* + * Library wide options + */ +/** + * xmlFeature: + * + * Used to examine the existence of features that can be enabled + * or disabled at compile-time. + * They used to be called XML_FEATURE_xxx but this clashed with Expat + */ +typedef enum { + XML_WITH_THREAD = 1, + XML_WITH_TREE = 2, + XML_WITH_OUTPUT = 3, + XML_WITH_PUSH = 4, + XML_WITH_READER = 5, + XML_WITH_PATTERN = 6, + XML_WITH_WRITER = 7, + XML_WITH_SAX1 = 8, + XML_WITH_FTP = 9, + XML_WITH_HTTP = 10, + XML_WITH_VALID = 11, + XML_WITH_HTML = 12, + XML_WITH_LEGACY = 13, + XML_WITH_C14N = 14, + XML_WITH_CATALOG = 15, + XML_WITH_XPATH = 16, + XML_WITH_XPTR = 17, + XML_WITH_XINCLUDE = 18, + XML_WITH_ICONV = 19, + XML_WITH_ISO8859X = 20, + XML_WITH_UNICODE = 21, + XML_WITH_REGEXP = 22, + XML_WITH_AUTOMATA = 23, + XML_WITH_EXPR = 24, + XML_WITH_SCHEMAS = 25, + XML_WITH_SCHEMATRON = 26, + XML_WITH_MODULES = 27, + XML_WITH_DEBUG = 28, + XML_WITH_DEBUG_MEM = 29, + XML_WITH_DEBUG_RUN = 30, /* unused */ + XML_WITH_ZLIB = 31, + XML_WITH_ICU = 32, + XML_WITH_LZMA = 33, + XML_WITH_NONE = 99999 /* just to be sure of allocation size */ +} xmlFeature; + +XMLPUBFUN int + xmlHasFeature (xmlFeature feature); + +#ifdef __cplusplus +} +#endif +#endif /* __XML_PARSER_H__ */ diff --git a/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/include/libxml2/libxml/parserInternals.h b/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/include/libxml2/libxml/parserInternals.h new file mode 100644 index 000000000..c4d4363b5 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/include/libxml2/libxml/parserInternals.h @@ -0,0 +1,671 @@ +/* + * Summary: internals routines and limits exported by the parser. + * Description: this module exports a number of internal parsing routines + * they are not really all intended for applications but + * can prove useful doing low level processing. + * + * Copy: See Copyright for the status of this software. + * + * Author: Daniel Veillard + */ + +#ifndef __XML_PARSER_INTERNALS_H__ +#define __XML_PARSER_INTERNALS_H__ + +#include +#include +#include +#include +#include + +#ifdef __cplusplus +extern "C" { +#endif + +/** + * xmlParserMaxDepth: + * + * DEPRECATED: has no effect + * + * arbitrary depth limit for the XML documents that we allow to + * process. This is not a limitation of the parser but a safety + * boundary feature, use XML_PARSE_HUGE option to override it. + */ +XML_DEPRECATED +XMLPUBVAR const unsigned int xmlParserMaxDepth; + +/** + * XML_MAX_TEXT_LENGTH: + * + * Maximum size allowed for a single text node when building a tree. + * This is not a limitation of the parser but a safety boundary feature, + * use XML_PARSE_HUGE option to override it. + * Introduced in 2.9.0 + */ +#define XML_MAX_TEXT_LENGTH 10000000 + +/** + * XML_MAX_HUGE_LENGTH: + * + * Maximum size allowed when XML_PARSE_HUGE is set. + */ +#define XML_MAX_HUGE_LENGTH 1000000000 + +/** + * XML_MAX_NAME_LENGTH: + * + * Maximum size allowed for a markup identifier. + * This is not a limitation of the parser but a safety boundary feature, + * use XML_PARSE_HUGE option to override it. 
+ * Note that with the use of parsing dictionaries overriding the limit + * may result in more runtime memory usage in face of "unfriendly' content + * Introduced in 2.9.0 + */ +#define XML_MAX_NAME_LENGTH 50000 + +/** + * XML_MAX_DICTIONARY_LIMIT: + * + * Maximum size allowed by the parser for a dictionary by default + * This is not a limitation of the parser but a safety boundary feature, + * use XML_PARSE_HUGE option to override it. + * Introduced in 2.9.0 + */ +#define XML_MAX_DICTIONARY_LIMIT 10000000 + +/** + * XML_MAX_LOOKUP_LIMIT: + * + * Maximum size allowed by the parser for ahead lookup + * This is an upper boundary enforced by the parser to avoid bad + * behaviour on "unfriendly' content + * Introduced in 2.9.0 + */ +#define XML_MAX_LOOKUP_LIMIT 10000000 + +/** + * XML_MAX_NAMELEN: + * + * Identifiers can be longer, but this will be more costly + * at runtime. + */ +#define XML_MAX_NAMELEN 100 + +/** + * INPUT_CHUNK: + * + * The parser tries to always have that amount of input ready. + * One of the point is providing context when reporting errors. + */ +#define INPUT_CHUNK 250 + +/************************************************************************ + * * + * UNICODE version of the macros. * + * * + ************************************************************************/ +/** + * IS_BYTE_CHAR: + * @c: an byte value (int) + * + * Macro to check the following production in the XML spec: + * + * [2] Char ::= #x9 | #xA | #xD | [#x20...] + * any byte character in the accepted range + */ +#define IS_BYTE_CHAR(c) xmlIsChar_ch(c) + +/** + * IS_CHAR: + * @c: an UNICODE value (int) + * + * Macro to check the following production in the XML spec: + * + * [2] Char ::= #x9 | #xA | #xD | [#x20-#xD7FF] | [#xE000-#xFFFD] + * | [#x10000-#x10FFFF] + * any Unicode character, excluding the surrogate blocks, FFFE, and FFFF. + */ +#define IS_CHAR(c) xmlIsCharQ(c) + +/** + * IS_CHAR_CH: + * @c: an xmlChar (usually an unsigned char) + * + * Behaves like IS_CHAR on single-byte value + */ +#define IS_CHAR_CH(c) xmlIsChar_ch(c) + +/** + * IS_BLANK: + * @c: an UNICODE value (int) + * + * Macro to check the following production in the XML spec: + * + * [3] S ::= (#x20 | #x9 | #xD | #xA)+ + */ +#define IS_BLANK(c) xmlIsBlankQ(c) + +/** + * IS_BLANK_CH: + * @c: an xmlChar value (normally unsigned char) + * + * Behaviour same as IS_BLANK + */ +#define IS_BLANK_CH(c) xmlIsBlank_ch(c) + +/** + * IS_BASECHAR: + * @c: an UNICODE value (int) + * + * Macro to check the following production in the XML spec: + * + * [85] BaseChar ::= ... long list see REC ... + */ +#define IS_BASECHAR(c) xmlIsBaseCharQ(c) + +/** + * IS_DIGIT: + * @c: an UNICODE value (int) + * + * Macro to check the following production in the XML spec: + * + * [88] Digit ::= ... long list see REC ... + */ +#define IS_DIGIT(c) xmlIsDigitQ(c) + +/** + * IS_DIGIT_CH: + * @c: an xmlChar value (usually an unsigned char) + * + * Behaves like IS_DIGIT but with a single byte argument + */ +#define IS_DIGIT_CH(c) xmlIsDigit_ch(c) + +/** + * IS_COMBINING: + * @c: an UNICODE value (int) + * + * Macro to check the following production in the XML spec: + * + * [87] CombiningChar ::= ... long list see REC ... 
+ */ +#define IS_COMBINING(c) xmlIsCombiningQ(c) + +/** + * IS_COMBINING_CH: + * @c: an xmlChar (usually an unsigned char) + * + * Always false (all combining chars > 0xff) + */ +#define IS_COMBINING_CH(c) 0 + +/** + * IS_EXTENDER: + * @c: an UNICODE value (int) + * + * Macro to check the following production in the XML spec: + * + * + * [89] Extender ::= #x00B7 | #x02D0 | #x02D1 | #x0387 | #x0640 | + * #x0E46 | #x0EC6 | #x3005 | [#x3031-#x3035] | + * [#x309D-#x309E] | [#x30FC-#x30FE] + */ +#define IS_EXTENDER(c) xmlIsExtenderQ(c) + +/** + * IS_EXTENDER_CH: + * @c: an xmlChar value (usually an unsigned char) + * + * Behaves like IS_EXTENDER but with a single-byte argument + */ +#define IS_EXTENDER_CH(c) xmlIsExtender_ch(c) + +/** + * IS_IDEOGRAPHIC: + * @c: an UNICODE value (int) + * + * Macro to check the following production in the XML spec: + * + * + * [86] Ideographic ::= [#x4E00-#x9FA5] | #x3007 | [#x3021-#x3029] + */ +#define IS_IDEOGRAPHIC(c) xmlIsIdeographicQ(c) + +/** + * IS_LETTER: + * @c: an UNICODE value (int) + * + * Macro to check the following production in the XML spec: + * + * + * [84] Letter ::= BaseChar | Ideographic + */ +#define IS_LETTER(c) (IS_BASECHAR(c) || IS_IDEOGRAPHIC(c)) + +/** + * IS_LETTER_CH: + * @c: an xmlChar value (normally unsigned char) + * + * Macro behaves like IS_LETTER, but only check base chars + * + */ +#define IS_LETTER_CH(c) xmlIsBaseChar_ch(c) + +/** + * IS_ASCII_LETTER: + * @c: an xmlChar value + * + * Macro to check [a-zA-Z] + * + */ +#define IS_ASCII_LETTER(c) (((0x41 <= (c)) && ((c) <= 0x5a)) || \ + ((0x61 <= (c)) && ((c) <= 0x7a))) + +/** + * IS_ASCII_DIGIT: + * @c: an xmlChar value + * + * Macro to check [0-9] + * + */ +#define IS_ASCII_DIGIT(c) ((0x30 <= (c)) && ((c) <= 0x39)) + +/** + * IS_PUBIDCHAR: + * @c: an UNICODE value (int) + * + * Macro to check the following production in the XML spec: + * + * + * [13] PubidChar ::= #x20 | #xD | #xA | [a-zA-Z0-9] | [-'()+,./:=?;!*#@$_%] + */ +#define IS_PUBIDCHAR(c) xmlIsPubidCharQ(c) + +/** + * IS_PUBIDCHAR_CH: + * @c: an xmlChar value (normally unsigned char) + * + * Same as IS_PUBIDCHAR but for single-byte value + */ +#define IS_PUBIDCHAR_CH(c) xmlIsPubidChar_ch(c) + +/** + * Global variables used for predefined strings. + */ +XMLPUBVAR const xmlChar xmlStringText[]; +XMLPUBVAR const xmlChar xmlStringTextNoenc[]; +XMLPUBVAR const xmlChar xmlStringComment[]; + +/* + * Function to finish the work of the macros where needed. + */ +XMLPUBFUN int xmlIsLetter (int c); + +/** + * Parser context. + */ +XMLPUBFUN xmlParserCtxtPtr + xmlCreateFileParserCtxt (const char *filename); +XMLPUBFUN xmlParserCtxtPtr + xmlCreateURLParserCtxt (const char *filename, + int options); +XMLPUBFUN xmlParserCtxtPtr + xmlCreateMemoryParserCtxt(const char *buffer, + int size); +XMLPUBFUN xmlParserCtxtPtr + xmlCreateEntityParserCtxt(const xmlChar *URL, + const xmlChar *ID, + const xmlChar *base); +XMLPUBFUN void + xmlCtxtErrMemory (xmlParserCtxtPtr ctxt); +XMLPUBFUN int + xmlSwitchEncoding (xmlParserCtxtPtr ctxt, + xmlCharEncoding enc); +XMLPUBFUN int + xmlSwitchEncodingName (xmlParserCtxtPtr ctxt, + const char *encoding); +XMLPUBFUN int + xmlSwitchToEncoding (xmlParserCtxtPtr ctxt, + xmlCharEncodingHandlerPtr handler); +XML_DEPRECATED +XMLPUBFUN int + xmlSwitchInputEncoding (xmlParserCtxtPtr ctxt, + xmlParserInputPtr input, + xmlCharEncodingHandlerPtr handler); + +/** + * Input Streams. 
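+ *
+ * These routines are mostly used by the parser itself. Illustrative
+ * sketch (assumes an already created parser context "ctxt"; "data.xml"
+ * is a placeholder):
+ *
+ *     xmlParserInputPtr in = xmlNewInputFromFile(ctxt, "data.xml");
+ *     if (in != NULL)
+ *         xmlPushInput(ctxt, in);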
+ */ +XMLPUBFUN xmlParserInputPtr + xmlNewStringInputStream (xmlParserCtxtPtr ctxt, + const xmlChar *buffer); +XML_DEPRECATED +XMLPUBFUN xmlParserInputPtr + xmlNewEntityInputStream (xmlParserCtxtPtr ctxt, + xmlEntityPtr entity); +XMLPUBFUN int + xmlPushInput (xmlParserCtxtPtr ctxt, + xmlParserInputPtr input); +XMLPUBFUN xmlChar + xmlPopInput (xmlParserCtxtPtr ctxt); +XMLPUBFUN void + xmlFreeInputStream (xmlParserInputPtr input); +XMLPUBFUN xmlParserInputPtr + xmlNewInputFromFile (xmlParserCtxtPtr ctxt, + const char *filename); +XMLPUBFUN xmlParserInputPtr + xmlNewInputStream (xmlParserCtxtPtr ctxt); + +/** + * Namespaces. + */ +XMLPUBFUN xmlChar * + xmlSplitQName (xmlParserCtxtPtr ctxt, + const xmlChar *name, + xmlChar **prefix); + +/** + * Generic production rules. + */ +XML_DEPRECATED +XMLPUBFUN const xmlChar * + xmlParseName (xmlParserCtxtPtr ctxt); +XML_DEPRECATED +XMLPUBFUN xmlChar * + xmlParseNmtoken (xmlParserCtxtPtr ctxt); +XML_DEPRECATED +XMLPUBFUN xmlChar * + xmlParseEntityValue (xmlParserCtxtPtr ctxt, + xmlChar **orig); +XML_DEPRECATED +XMLPUBFUN xmlChar * + xmlParseAttValue (xmlParserCtxtPtr ctxt); +XML_DEPRECATED +XMLPUBFUN xmlChar * + xmlParseSystemLiteral (xmlParserCtxtPtr ctxt); +XML_DEPRECATED +XMLPUBFUN xmlChar * + xmlParsePubidLiteral (xmlParserCtxtPtr ctxt); +XML_DEPRECATED +XMLPUBFUN void + xmlParseCharData (xmlParserCtxtPtr ctxt, + int cdata); +XML_DEPRECATED +XMLPUBFUN xmlChar * + xmlParseExternalID (xmlParserCtxtPtr ctxt, + xmlChar **publicID, + int strict); +XML_DEPRECATED +XMLPUBFUN void + xmlParseComment (xmlParserCtxtPtr ctxt); +XML_DEPRECATED +XMLPUBFUN const xmlChar * + xmlParsePITarget (xmlParserCtxtPtr ctxt); +XML_DEPRECATED +XMLPUBFUN void + xmlParsePI (xmlParserCtxtPtr ctxt); +XML_DEPRECATED +XMLPUBFUN void + xmlParseNotationDecl (xmlParserCtxtPtr ctxt); +XML_DEPRECATED +XMLPUBFUN void + xmlParseEntityDecl (xmlParserCtxtPtr ctxt); +XML_DEPRECATED +XMLPUBFUN int + xmlParseDefaultDecl (xmlParserCtxtPtr ctxt, + xmlChar **value); +XML_DEPRECATED +XMLPUBFUN xmlEnumerationPtr + xmlParseNotationType (xmlParserCtxtPtr ctxt); +XML_DEPRECATED +XMLPUBFUN xmlEnumerationPtr + xmlParseEnumerationType (xmlParserCtxtPtr ctxt); +XML_DEPRECATED +XMLPUBFUN int + xmlParseEnumeratedType (xmlParserCtxtPtr ctxt, + xmlEnumerationPtr *tree); +XML_DEPRECATED +XMLPUBFUN int + xmlParseAttributeType (xmlParserCtxtPtr ctxt, + xmlEnumerationPtr *tree); +XML_DEPRECATED +XMLPUBFUN void + xmlParseAttributeListDecl(xmlParserCtxtPtr ctxt); +XML_DEPRECATED +XMLPUBFUN xmlElementContentPtr + xmlParseElementMixedContentDecl + (xmlParserCtxtPtr ctxt, + int inputchk); +XML_DEPRECATED +XMLPUBFUN xmlElementContentPtr + xmlParseElementChildrenContentDecl + (xmlParserCtxtPtr ctxt, + int inputchk); +XML_DEPRECATED +XMLPUBFUN int + xmlParseElementContentDecl(xmlParserCtxtPtr ctxt, + const xmlChar *name, + xmlElementContentPtr *result); +XML_DEPRECATED +XMLPUBFUN int + xmlParseElementDecl (xmlParserCtxtPtr ctxt); +XML_DEPRECATED +XMLPUBFUN void + xmlParseMarkupDecl (xmlParserCtxtPtr ctxt); +XML_DEPRECATED +XMLPUBFUN int + xmlParseCharRef (xmlParserCtxtPtr ctxt); +XML_DEPRECATED +XMLPUBFUN xmlEntityPtr + xmlParseEntityRef (xmlParserCtxtPtr ctxt); +XML_DEPRECATED +XMLPUBFUN void + xmlParseReference (xmlParserCtxtPtr ctxt); +XML_DEPRECATED +XMLPUBFUN void + xmlParsePEReference (xmlParserCtxtPtr ctxt); +XML_DEPRECATED +XMLPUBFUN void + xmlParseDocTypeDecl (xmlParserCtxtPtr ctxt); +#ifdef LIBXML_SAX1_ENABLED +XML_DEPRECATED +XMLPUBFUN const xmlChar * + xmlParseAttribute (xmlParserCtxtPtr ctxt, + xmlChar 
**value); +XML_DEPRECATED +XMLPUBFUN const xmlChar * + xmlParseStartTag (xmlParserCtxtPtr ctxt); +XML_DEPRECATED +XMLPUBFUN void + xmlParseEndTag (xmlParserCtxtPtr ctxt); +#endif /* LIBXML_SAX1_ENABLED */ +XML_DEPRECATED +XMLPUBFUN void + xmlParseCDSect (xmlParserCtxtPtr ctxt); +XMLPUBFUN void + xmlParseContent (xmlParserCtxtPtr ctxt); +XML_DEPRECATED +XMLPUBFUN void + xmlParseElement (xmlParserCtxtPtr ctxt); +XML_DEPRECATED +XMLPUBFUN xmlChar * + xmlParseVersionNum (xmlParserCtxtPtr ctxt); +XML_DEPRECATED +XMLPUBFUN xmlChar * + xmlParseVersionInfo (xmlParserCtxtPtr ctxt); +XML_DEPRECATED +XMLPUBFUN xmlChar * + xmlParseEncName (xmlParserCtxtPtr ctxt); +XML_DEPRECATED +XMLPUBFUN const xmlChar * + xmlParseEncodingDecl (xmlParserCtxtPtr ctxt); +XML_DEPRECATED +XMLPUBFUN int + xmlParseSDDecl (xmlParserCtxtPtr ctxt); +XML_DEPRECATED +XMLPUBFUN void + xmlParseXMLDecl (xmlParserCtxtPtr ctxt); +XML_DEPRECATED +XMLPUBFUN void + xmlParseTextDecl (xmlParserCtxtPtr ctxt); +XML_DEPRECATED +XMLPUBFUN void + xmlParseMisc (xmlParserCtxtPtr ctxt); +XMLPUBFUN void + xmlParseExternalSubset (xmlParserCtxtPtr ctxt, + const xmlChar *ExternalID, + const xmlChar *SystemID); +/** + * XML_SUBSTITUTE_NONE: + * + * If no entities need to be substituted. + */ +#define XML_SUBSTITUTE_NONE 0 +/** + * XML_SUBSTITUTE_REF: + * + * Whether general entities need to be substituted. + */ +#define XML_SUBSTITUTE_REF 1 +/** + * XML_SUBSTITUTE_PEREF: + * + * Whether parameter entities need to be substituted. + */ +#define XML_SUBSTITUTE_PEREF 2 +/** + * XML_SUBSTITUTE_BOTH: + * + * Both general and parameter entities need to be substituted. + */ +#define XML_SUBSTITUTE_BOTH 3 + +XML_DEPRECATED +XMLPUBFUN xmlChar * + xmlStringDecodeEntities (xmlParserCtxtPtr ctxt, + const xmlChar *str, + int what, + xmlChar end, + xmlChar end2, + xmlChar end3); +XML_DEPRECATED +XMLPUBFUN xmlChar * + xmlStringLenDecodeEntities (xmlParserCtxtPtr ctxt, + const xmlChar *str, + int len, + int what, + xmlChar end, + xmlChar end2, + xmlChar end3); + +/* + * Generated by MACROS on top of parser.c c.f. PUSH_AND_POP. + */ +XML_DEPRECATED +XMLPUBFUN int nodePush (xmlParserCtxtPtr ctxt, + xmlNodePtr value); +XML_DEPRECATED +XMLPUBFUN xmlNodePtr nodePop (xmlParserCtxtPtr ctxt); +XMLPUBFUN int inputPush (xmlParserCtxtPtr ctxt, + xmlParserInputPtr value); +XMLPUBFUN xmlParserInputPtr inputPop (xmlParserCtxtPtr ctxt); +XML_DEPRECATED +XMLPUBFUN const xmlChar * namePop (xmlParserCtxtPtr ctxt); +XML_DEPRECATED +XMLPUBFUN int namePush (xmlParserCtxtPtr ctxt, + const xmlChar *value); + +/* + * other commodities shared between parser.c and parserInternals. + */ +XML_DEPRECATED +XMLPUBFUN int xmlSkipBlankChars (xmlParserCtxtPtr ctxt); +XML_DEPRECATED +XMLPUBFUN int xmlStringCurrentChar (xmlParserCtxtPtr ctxt, + const xmlChar *cur, + int *len); +XML_DEPRECATED +XMLPUBFUN void xmlParserHandlePEReference(xmlParserCtxtPtr ctxt); +XML_DEPRECATED +XMLPUBFUN int xmlCheckLanguageID (const xmlChar *lang); + +/* + * Really core function shared with HTML parser. + */ +XML_DEPRECATED +XMLPUBFUN int xmlCurrentChar (xmlParserCtxtPtr ctxt, + int *len); +XMLPUBFUN int xmlCopyCharMultiByte (xmlChar *out, + int val); +XMLPUBFUN int xmlCopyChar (int len, + xmlChar *out, + int val); +XML_DEPRECATED +XMLPUBFUN void xmlNextChar (xmlParserCtxtPtr ctxt); +XML_DEPRECATED +XMLPUBFUN void xmlParserInputShrink (xmlParserInputPtr in); + +/* + * Specific function to keep track of entities references + * and used by the XSLT debugger. 
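+ * Like the rest of the LIBXML_LEGACY_ENABLED block that follows, these
+ * interfaces are marked XML_DEPRECATED and should not be used in new
+ * code.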
+ */ +#ifdef LIBXML_LEGACY_ENABLED +/** + * xmlEntityReferenceFunc: + * @ent: the entity + * @firstNode: the fist node in the chunk + * @lastNode: the last nod in the chunk + * + * Callback function used when one needs to be able to track back the + * provenance of a chunk of nodes inherited from an entity replacement. + */ +typedef void (*xmlEntityReferenceFunc) (xmlEntityPtr ent, + xmlNodePtr firstNode, + xmlNodePtr lastNode); + +XML_DEPRECATED +XMLPUBFUN void xmlSetEntityReferenceFunc (xmlEntityReferenceFunc func); + +XML_DEPRECATED +XMLPUBFUN xmlChar * + xmlParseQuotedString (xmlParserCtxtPtr ctxt); +XML_DEPRECATED +XMLPUBFUN void + xmlParseNamespace (xmlParserCtxtPtr ctxt); +XML_DEPRECATED +XMLPUBFUN xmlChar * + xmlNamespaceParseNSDef (xmlParserCtxtPtr ctxt); +XML_DEPRECATED +XMLPUBFUN xmlChar * + xmlScanName (xmlParserCtxtPtr ctxt); +XML_DEPRECATED +XMLPUBFUN xmlChar * + xmlNamespaceParseNCName (xmlParserCtxtPtr ctxt); +XML_DEPRECATED +XMLPUBFUN void xmlParserHandleReference(xmlParserCtxtPtr ctxt); +XML_DEPRECATED +XMLPUBFUN xmlChar * + xmlNamespaceParseQName (xmlParserCtxtPtr ctxt, + xmlChar **prefix); +/** + * Entities + */ +XML_DEPRECATED +XMLPUBFUN xmlChar * + xmlDecodeEntities (xmlParserCtxtPtr ctxt, + int len, + int what, + xmlChar end, + xmlChar end2, + xmlChar end3); +XML_DEPRECATED +XMLPUBFUN void + xmlHandleEntity (xmlParserCtxtPtr ctxt, + xmlEntityPtr entity); + +#endif /* LIBXML_LEGACY_ENABLED */ + +#ifdef __cplusplus +} +#endif +#endif /* __XML_PARSER_INTERNALS_H__ */ diff --git a/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/include/libxml2/libxml/pattern.h b/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/include/libxml2/libxml/pattern.h new file mode 100644 index 000000000..947f0900a --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/include/libxml2/libxml/pattern.h @@ -0,0 +1,106 @@ +/* + * Summary: pattern expression handling + * Description: allows to compile and test pattern expressions for nodes + * either in a tree or based on a parser state. + * + * Copy: See Copyright for the status of this software. 
+ * + * Author: Daniel Veillard + */ + +#ifndef __XML_PATTERN_H__ +#define __XML_PATTERN_H__ + +#include +#include +#include + +#ifdef LIBXML_PATTERN_ENABLED + +#ifdef __cplusplus +extern "C" { +#endif + +/** + * xmlPattern: + * + * A compiled (XPath based) pattern to select nodes + */ +typedef struct _xmlPattern xmlPattern; +typedef xmlPattern *xmlPatternPtr; + +/** + * xmlPatternFlags: + * + * This is the set of options affecting the behaviour of pattern + * matching with this module + * + */ +typedef enum { + XML_PATTERN_DEFAULT = 0, /* simple pattern match */ + XML_PATTERN_XPATH = 1<<0, /* standard XPath pattern */ + XML_PATTERN_XSSEL = 1<<1, /* XPath subset for schema selector */ + XML_PATTERN_XSFIELD = 1<<2 /* XPath subset for schema field */ +} xmlPatternFlags; + +XMLPUBFUN void + xmlFreePattern (xmlPatternPtr comp); + +XMLPUBFUN void + xmlFreePatternList (xmlPatternPtr comp); + +XMLPUBFUN xmlPatternPtr + xmlPatterncompile (const xmlChar *pattern, + xmlDict *dict, + int flags, + const xmlChar **namespaces); +XMLPUBFUN int + xmlPatternCompileSafe (const xmlChar *pattern, + xmlDict *dict, + int flags, + const xmlChar **namespaces, + xmlPatternPtr *patternOut); +XMLPUBFUN int + xmlPatternMatch (xmlPatternPtr comp, + xmlNodePtr node); + +/* streaming interfaces */ +typedef struct _xmlStreamCtxt xmlStreamCtxt; +typedef xmlStreamCtxt *xmlStreamCtxtPtr; + +XMLPUBFUN int + xmlPatternStreamable (xmlPatternPtr comp); +XMLPUBFUN int + xmlPatternMaxDepth (xmlPatternPtr comp); +XMLPUBFUN int + xmlPatternMinDepth (xmlPatternPtr comp); +XMLPUBFUN int + xmlPatternFromRoot (xmlPatternPtr comp); +XMLPUBFUN xmlStreamCtxtPtr + xmlPatternGetStreamCtxt (xmlPatternPtr comp); +XMLPUBFUN void + xmlFreeStreamCtxt (xmlStreamCtxtPtr stream); +XMLPUBFUN int + xmlStreamPushNode (xmlStreamCtxtPtr stream, + const xmlChar *name, + const xmlChar *ns, + int nodeType); +XMLPUBFUN int + xmlStreamPush (xmlStreamCtxtPtr stream, + const xmlChar *name, + const xmlChar *ns); +XMLPUBFUN int + xmlStreamPushAttr (xmlStreamCtxtPtr stream, + const xmlChar *name, + const xmlChar *ns); +XMLPUBFUN int + xmlStreamPop (xmlStreamCtxtPtr stream); +XMLPUBFUN int + xmlStreamWantsAnyNode (xmlStreamCtxtPtr stream); +#ifdef __cplusplus +} +#endif + +#endif /* LIBXML_PATTERN_ENABLED */ + +#endif /* __XML_PATTERN_H__ */ diff --git a/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/include/libxml2/libxml/relaxng.h b/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/include/libxml2/libxml/relaxng.h new file mode 100644 index 000000000..079b7f125 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/include/libxml2/libxml/relaxng.h @@ -0,0 +1,219 @@ +/* + * Summary: implementation of the Relax-NG validation + * Description: implementation of the Relax-NG validation + * + * Copy: See Copyright for the status of this software. + * + * Author: Daniel Veillard + */ + +#ifndef __XML_RELAX_NG__ +#define __XML_RELAX_NG__ + +#include +#include +#include +#include + +#ifdef LIBXML_SCHEMAS_ENABLED + +#ifdef __cplusplus +extern "C" { +#endif + +typedef struct _xmlRelaxNG xmlRelaxNG; +typedef xmlRelaxNG *xmlRelaxNGPtr; + + +/** + * xmlRelaxNGValidityErrorFunc: + * @ctx: the validation context + * @msg: the message + * @...: extra arguments + * + * Signature of an error callback from a Relax-NG validation + */ +typedef void (*xmlRelaxNGValidityErrorFunc) (void *ctx, + const char *msg, + ...) 
LIBXML_ATTR_FORMAT(2,3); + +/** + * xmlRelaxNGValidityWarningFunc: + * @ctx: the validation context + * @msg: the message + * @...: extra arguments + * + * Signature of a warning callback from a Relax-NG validation + */ +typedef void (*xmlRelaxNGValidityWarningFunc) (void *ctx, + const char *msg, + ...) LIBXML_ATTR_FORMAT(2,3); + +/** + * A schemas validation context + */ +typedef struct _xmlRelaxNGParserCtxt xmlRelaxNGParserCtxt; +typedef xmlRelaxNGParserCtxt *xmlRelaxNGParserCtxtPtr; + +typedef struct _xmlRelaxNGValidCtxt xmlRelaxNGValidCtxt; +typedef xmlRelaxNGValidCtxt *xmlRelaxNGValidCtxtPtr; + +/* + * xmlRelaxNGValidErr: + * + * List of possible Relax NG validation errors + */ +typedef enum { + XML_RELAXNG_OK = 0, + XML_RELAXNG_ERR_MEMORY, + XML_RELAXNG_ERR_TYPE, + XML_RELAXNG_ERR_TYPEVAL, + XML_RELAXNG_ERR_DUPID, + XML_RELAXNG_ERR_TYPECMP, + XML_RELAXNG_ERR_NOSTATE, + XML_RELAXNG_ERR_NODEFINE, + XML_RELAXNG_ERR_LISTEXTRA, + XML_RELAXNG_ERR_LISTEMPTY, + XML_RELAXNG_ERR_INTERNODATA, + XML_RELAXNG_ERR_INTERSEQ, + XML_RELAXNG_ERR_INTEREXTRA, + XML_RELAXNG_ERR_ELEMNAME, + XML_RELAXNG_ERR_ATTRNAME, + XML_RELAXNG_ERR_ELEMNONS, + XML_RELAXNG_ERR_ATTRNONS, + XML_RELAXNG_ERR_ELEMWRONGNS, + XML_RELAXNG_ERR_ATTRWRONGNS, + XML_RELAXNG_ERR_ELEMEXTRANS, + XML_RELAXNG_ERR_ATTREXTRANS, + XML_RELAXNG_ERR_ELEMNOTEMPTY, + XML_RELAXNG_ERR_NOELEM, + XML_RELAXNG_ERR_NOTELEM, + XML_RELAXNG_ERR_ATTRVALID, + XML_RELAXNG_ERR_CONTENTVALID, + XML_RELAXNG_ERR_EXTRACONTENT, + XML_RELAXNG_ERR_INVALIDATTR, + XML_RELAXNG_ERR_DATAELEM, + XML_RELAXNG_ERR_VALELEM, + XML_RELAXNG_ERR_LISTELEM, + XML_RELAXNG_ERR_DATATYPE, + XML_RELAXNG_ERR_VALUE, + XML_RELAXNG_ERR_LIST, + XML_RELAXNG_ERR_NOGRAMMAR, + XML_RELAXNG_ERR_EXTRADATA, + XML_RELAXNG_ERR_LACKDATA, + XML_RELAXNG_ERR_INTERNAL, + XML_RELAXNG_ERR_ELEMWRONG, + XML_RELAXNG_ERR_TEXTWRONG +} xmlRelaxNGValidErr; + +/* + * xmlRelaxNGParserFlags: + * + * List of possible Relax NG Parser flags + */ +typedef enum { + XML_RELAXNGP_NONE = 0, + XML_RELAXNGP_FREE_DOC = 1, + XML_RELAXNGP_CRNG = 2 +} xmlRelaxNGParserFlag; + +XMLPUBFUN int + xmlRelaxNGInitTypes (void); +XML_DEPRECATED +XMLPUBFUN void + xmlRelaxNGCleanupTypes (void); + +/* + * Interfaces for parsing. 
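+ *
+ * Typical flow (illustrative sketch; "schema.rng" is a placeholder,
+ * "doc" is an already parsed xmlDocPtr, error handling is omitted and
+ * xmlRelaxNGValidateDoc() returns 0 when the document is valid):
+ *
+ *     xmlRelaxNGParserCtxtPtr pctxt = xmlRelaxNGNewParserCtxt("schema.rng");
+ *     xmlRelaxNGPtr schema = xmlRelaxNGParse(pctxt);
+ *     xmlRelaxNGFreeParserCtxt(pctxt);
+ *
+ *     xmlRelaxNGValidCtxtPtr vctxt = xmlRelaxNGNewValidCtxt(schema);
+ *     int ret = xmlRelaxNGValidateDoc(vctxt, doc);
+ *     xmlRelaxNGFreeValidCtxt(vctxt);
+ *     xmlRelaxNGFree(schema);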
+ */ +XMLPUBFUN xmlRelaxNGParserCtxtPtr + xmlRelaxNGNewParserCtxt (const char *URL); +XMLPUBFUN xmlRelaxNGParserCtxtPtr + xmlRelaxNGNewMemParserCtxt (const char *buffer, + int size); +XMLPUBFUN xmlRelaxNGParserCtxtPtr + xmlRelaxNGNewDocParserCtxt (xmlDocPtr doc); + +XMLPUBFUN int + xmlRelaxParserSetFlag (xmlRelaxNGParserCtxtPtr ctxt, + int flag); + +XMLPUBFUN void + xmlRelaxNGFreeParserCtxt (xmlRelaxNGParserCtxtPtr ctxt); +XMLPUBFUN void + xmlRelaxNGSetParserErrors(xmlRelaxNGParserCtxtPtr ctxt, + xmlRelaxNGValidityErrorFunc err, + xmlRelaxNGValidityWarningFunc warn, + void *ctx); +XMLPUBFUN int + xmlRelaxNGGetParserErrors(xmlRelaxNGParserCtxtPtr ctxt, + xmlRelaxNGValidityErrorFunc *err, + xmlRelaxNGValidityWarningFunc *warn, + void **ctx); +XMLPUBFUN void + xmlRelaxNGSetParserStructuredErrors( + xmlRelaxNGParserCtxtPtr ctxt, + xmlStructuredErrorFunc serror, + void *ctx); +XMLPUBFUN xmlRelaxNGPtr + xmlRelaxNGParse (xmlRelaxNGParserCtxtPtr ctxt); +XMLPUBFUN void + xmlRelaxNGFree (xmlRelaxNGPtr schema); +#ifdef LIBXML_OUTPUT_ENABLED +XMLPUBFUN void + xmlRelaxNGDump (FILE *output, + xmlRelaxNGPtr schema); +XMLPUBFUN void + xmlRelaxNGDumpTree (FILE * output, + xmlRelaxNGPtr schema); +#endif /* LIBXML_OUTPUT_ENABLED */ +/* + * Interfaces for validating + */ +XMLPUBFUN void + xmlRelaxNGSetValidErrors(xmlRelaxNGValidCtxtPtr ctxt, + xmlRelaxNGValidityErrorFunc err, + xmlRelaxNGValidityWarningFunc warn, + void *ctx); +XMLPUBFUN int + xmlRelaxNGGetValidErrors(xmlRelaxNGValidCtxtPtr ctxt, + xmlRelaxNGValidityErrorFunc *err, + xmlRelaxNGValidityWarningFunc *warn, + void **ctx); +XMLPUBFUN void + xmlRelaxNGSetValidStructuredErrors(xmlRelaxNGValidCtxtPtr ctxt, + xmlStructuredErrorFunc serror, void *ctx); +XMLPUBFUN xmlRelaxNGValidCtxtPtr + xmlRelaxNGNewValidCtxt (xmlRelaxNGPtr schema); +XMLPUBFUN void + xmlRelaxNGFreeValidCtxt (xmlRelaxNGValidCtxtPtr ctxt); +XMLPUBFUN int + xmlRelaxNGValidateDoc (xmlRelaxNGValidCtxtPtr ctxt, + xmlDocPtr doc); +/* + * Interfaces for progressive validation when possible + */ +XMLPUBFUN int + xmlRelaxNGValidatePushElement (xmlRelaxNGValidCtxtPtr ctxt, + xmlDocPtr doc, + xmlNodePtr elem); +XMLPUBFUN int + xmlRelaxNGValidatePushCData (xmlRelaxNGValidCtxtPtr ctxt, + const xmlChar *data, + int len); +XMLPUBFUN int + xmlRelaxNGValidatePopElement (xmlRelaxNGValidCtxtPtr ctxt, + xmlDocPtr doc, + xmlNodePtr elem); +XMLPUBFUN int + xmlRelaxNGValidateFullElement (xmlRelaxNGValidCtxtPtr ctxt, + xmlDocPtr doc, + xmlNodePtr elem); + +#ifdef __cplusplus +} +#endif + +#endif /* LIBXML_SCHEMAS_ENABLED */ + +#endif /* __XML_RELAX_NG__ */ diff --git a/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/include/libxml2/libxml/schemasInternals.h b/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/include/libxml2/libxml/schemasInternals.h new file mode 100644 index 000000000..e9d3b3c7a --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/include/libxml2/libxml/schemasInternals.h @@ -0,0 +1,959 @@ +/* + * Summary: internal interfaces for XML Schemas + * Description: internal interfaces for the XML Schemas handling + * and schema validity checking + * The Schemas development is a Work In Progress. + * Some of those interfaces are not guaranteed to be API or ABI stable ! + * + * Copy: See Copyright for the status of this software. 
+ * + * Author: Daniel Veillard + */ + + +#ifndef __XML_SCHEMA_INTERNALS_H__ +#define __XML_SCHEMA_INTERNALS_H__ + +#include + +#ifdef LIBXML_SCHEMAS_ENABLED + +#include +#include +#include +#include + +#ifdef __cplusplus +extern "C" { +#endif + +typedef enum { + XML_SCHEMAS_UNKNOWN = 0, + XML_SCHEMAS_STRING = 1, + XML_SCHEMAS_NORMSTRING = 2, + XML_SCHEMAS_DECIMAL = 3, + XML_SCHEMAS_TIME = 4, + XML_SCHEMAS_GDAY = 5, + XML_SCHEMAS_GMONTH = 6, + XML_SCHEMAS_GMONTHDAY = 7, + XML_SCHEMAS_GYEAR = 8, + XML_SCHEMAS_GYEARMONTH = 9, + XML_SCHEMAS_DATE = 10, + XML_SCHEMAS_DATETIME = 11, + XML_SCHEMAS_DURATION = 12, + XML_SCHEMAS_FLOAT = 13, + XML_SCHEMAS_DOUBLE = 14, + XML_SCHEMAS_BOOLEAN = 15, + XML_SCHEMAS_TOKEN = 16, + XML_SCHEMAS_LANGUAGE = 17, + XML_SCHEMAS_NMTOKEN = 18, + XML_SCHEMAS_NMTOKENS = 19, + XML_SCHEMAS_NAME = 20, + XML_SCHEMAS_QNAME = 21, + XML_SCHEMAS_NCNAME = 22, + XML_SCHEMAS_ID = 23, + XML_SCHEMAS_IDREF = 24, + XML_SCHEMAS_IDREFS = 25, + XML_SCHEMAS_ENTITY = 26, + XML_SCHEMAS_ENTITIES = 27, + XML_SCHEMAS_NOTATION = 28, + XML_SCHEMAS_ANYURI = 29, + XML_SCHEMAS_INTEGER = 30, + XML_SCHEMAS_NPINTEGER = 31, + XML_SCHEMAS_NINTEGER = 32, + XML_SCHEMAS_NNINTEGER = 33, + XML_SCHEMAS_PINTEGER = 34, + XML_SCHEMAS_INT = 35, + XML_SCHEMAS_UINT = 36, + XML_SCHEMAS_LONG = 37, + XML_SCHEMAS_ULONG = 38, + XML_SCHEMAS_SHORT = 39, + XML_SCHEMAS_USHORT = 40, + XML_SCHEMAS_BYTE = 41, + XML_SCHEMAS_UBYTE = 42, + XML_SCHEMAS_HEXBINARY = 43, + XML_SCHEMAS_BASE64BINARY = 44, + XML_SCHEMAS_ANYTYPE = 45, + XML_SCHEMAS_ANYSIMPLETYPE = 46 +} xmlSchemaValType; + +/* + * XML Schemas defines multiple type of types. + */ +typedef enum { + XML_SCHEMA_TYPE_BASIC = 1, /* A built-in datatype */ + XML_SCHEMA_TYPE_ANY, + XML_SCHEMA_TYPE_FACET, + XML_SCHEMA_TYPE_SIMPLE, + XML_SCHEMA_TYPE_COMPLEX, + XML_SCHEMA_TYPE_SEQUENCE = 6, + XML_SCHEMA_TYPE_CHOICE, + XML_SCHEMA_TYPE_ALL, + XML_SCHEMA_TYPE_SIMPLE_CONTENT, + XML_SCHEMA_TYPE_COMPLEX_CONTENT, + XML_SCHEMA_TYPE_UR, + XML_SCHEMA_TYPE_RESTRICTION, + XML_SCHEMA_TYPE_EXTENSION, + XML_SCHEMA_TYPE_ELEMENT, + XML_SCHEMA_TYPE_ATTRIBUTE, + XML_SCHEMA_TYPE_ATTRIBUTEGROUP, + XML_SCHEMA_TYPE_GROUP, + XML_SCHEMA_TYPE_NOTATION, + XML_SCHEMA_TYPE_LIST, + XML_SCHEMA_TYPE_UNION, + XML_SCHEMA_TYPE_ANY_ATTRIBUTE, + XML_SCHEMA_TYPE_IDC_UNIQUE, + XML_SCHEMA_TYPE_IDC_KEY, + XML_SCHEMA_TYPE_IDC_KEYREF, + XML_SCHEMA_TYPE_PARTICLE = 25, + XML_SCHEMA_TYPE_ATTRIBUTE_USE, + XML_SCHEMA_FACET_MININCLUSIVE = 1000, + XML_SCHEMA_FACET_MINEXCLUSIVE, + XML_SCHEMA_FACET_MAXINCLUSIVE, + XML_SCHEMA_FACET_MAXEXCLUSIVE, + XML_SCHEMA_FACET_TOTALDIGITS, + XML_SCHEMA_FACET_FRACTIONDIGITS, + XML_SCHEMA_FACET_PATTERN, + XML_SCHEMA_FACET_ENUMERATION, + XML_SCHEMA_FACET_WHITESPACE, + XML_SCHEMA_FACET_LENGTH, + XML_SCHEMA_FACET_MAXLENGTH, + XML_SCHEMA_FACET_MINLENGTH, + XML_SCHEMA_EXTRA_QNAMEREF = 2000, + XML_SCHEMA_EXTRA_ATTR_USE_PROHIB +} xmlSchemaTypeType; + +typedef enum { + XML_SCHEMA_CONTENT_UNKNOWN = 0, + XML_SCHEMA_CONTENT_EMPTY = 1, + XML_SCHEMA_CONTENT_ELEMENTS, + XML_SCHEMA_CONTENT_MIXED, + XML_SCHEMA_CONTENT_SIMPLE, + XML_SCHEMA_CONTENT_MIXED_OR_ELEMENTS, /* Obsolete */ + XML_SCHEMA_CONTENT_BASIC, + XML_SCHEMA_CONTENT_ANY +} xmlSchemaContentType; + +typedef struct _xmlSchemaVal xmlSchemaVal; +typedef xmlSchemaVal *xmlSchemaValPtr; + +typedef struct _xmlSchemaType xmlSchemaType; +typedef xmlSchemaType *xmlSchemaTypePtr; + +typedef struct _xmlSchemaFacet xmlSchemaFacet; +typedef xmlSchemaFacet *xmlSchemaFacetPtr; + +/** + * Annotation + */ +typedef struct _xmlSchemaAnnot xmlSchemaAnnot; +typedef 
xmlSchemaAnnot *xmlSchemaAnnotPtr; +struct _xmlSchemaAnnot { + struct _xmlSchemaAnnot *next; + xmlNodePtr content; /* the annotation */ +}; + +/** + * XML_SCHEMAS_ANYATTR_SKIP: + * + * Skip unknown attribute from validation + * Obsolete, not used anymore. + */ +#define XML_SCHEMAS_ANYATTR_SKIP 1 +/** + * XML_SCHEMAS_ANYATTR_LAX: + * + * Ignore validation non definition on attributes + * Obsolete, not used anymore. + */ +#define XML_SCHEMAS_ANYATTR_LAX 2 +/** + * XML_SCHEMAS_ANYATTR_STRICT: + * + * Apply strict validation rules on attributes + * Obsolete, not used anymore. + */ +#define XML_SCHEMAS_ANYATTR_STRICT 3 +/** + * XML_SCHEMAS_ANY_SKIP: + * + * Skip unknown attribute from validation + */ +#define XML_SCHEMAS_ANY_SKIP 1 +/** + * XML_SCHEMAS_ANY_LAX: + * + * Used by wildcards. + * Validate if type found, don't worry if not found + */ +#define XML_SCHEMAS_ANY_LAX 2 +/** + * XML_SCHEMAS_ANY_STRICT: + * + * Used by wildcards. + * Apply strict validation rules + */ +#define XML_SCHEMAS_ANY_STRICT 3 +/** + * XML_SCHEMAS_ATTR_USE_PROHIBITED: + * + * Used by wildcards. + * The attribute is prohibited. + */ +#define XML_SCHEMAS_ATTR_USE_PROHIBITED 0 +/** + * XML_SCHEMAS_ATTR_USE_REQUIRED: + * + * The attribute is required. + */ +#define XML_SCHEMAS_ATTR_USE_REQUIRED 1 +/** + * XML_SCHEMAS_ATTR_USE_OPTIONAL: + * + * The attribute is optional. + */ +#define XML_SCHEMAS_ATTR_USE_OPTIONAL 2 +/** + * XML_SCHEMAS_ATTR_GLOBAL: + * + * allow elements in no namespace + */ +#define XML_SCHEMAS_ATTR_GLOBAL 1 << 0 +/** + * XML_SCHEMAS_ATTR_NSDEFAULT: + * + * allow elements in no namespace + */ +#define XML_SCHEMAS_ATTR_NSDEFAULT 1 << 7 +/** + * XML_SCHEMAS_ATTR_INTERNAL_RESOLVED: + * + * this is set when the "type" and "ref" references + * have been resolved. + */ +#define XML_SCHEMAS_ATTR_INTERNAL_RESOLVED 1 << 8 +/** + * XML_SCHEMAS_ATTR_FIXED: + * + * the attribute has a fixed value + */ +#define XML_SCHEMAS_ATTR_FIXED 1 << 9 + +/** + * xmlSchemaAttribute: + * An attribute definition. + */ + +typedef struct _xmlSchemaAttribute xmlSchemaAttribute; +typedef xmlSchemaAttribute *xmlSchemaAttributePtr; +struct _xmlSchemaAttribute { + xmlSchemaTypeType type; + struct _xmlSchemaAttribute *next; /* the next attribute (not used?) */ + const xmlChar *name; /* the name of the declaration */ + const xmlChar *id; /* Deprecated; not used */ + const xmlChar *ref; /* Deprecated; not used */ + const xmlChar *refNs; /* Deprecated; not used */ + const xmlChar *typeName; /* the local name of the type definition */ + const xmlChar *typeNs; /* the ns URI of the type definition */ + xmlSchemaAnnotPtr annot; + + xmlSchemaTypePtr base; /* Deprecated; not used */ + int occurs; /* Deprecated; not used */ + const xmlChar *defValue; /* The initial value of the value constraint */ + xmlSchemaTypePtr subtypes; /* the type definition */ + xmlNodePtr node; + const xmlChar *targetNamespace; + int flags; + const xmlChar *refPrefix; /* Deprecated; not used */ + xmlSchemaValPtr defVal; /* The compiled value constraint */ + xmlSchemaAttributePtr refDecl; /* Deprecated; not used */ +}; + +/** + * xmlSchemaAttributeLink: + * Used to build a list of attribute uses on complexType definitions. + * WARNING: Deprecated; not used. + */ +typedef struct _xmlSchemaAttributeLink xmlSchemaAttributeLink; +typedef xmlSchemaAttributeLink *xmlSchemaAttributeLinkPtr; +struct _xmlSchemaAttributeLink { + struct _xmlSchemaAttributeLink *next;/* the next attribute link ... 
*/ + struct _xmlSchemaAttribute *attr;/* the linked attribute */ +}; + +/** + * XML_SCHEMAS_WILDCARD_COMPLETE: + * + * If the wildcard is complete. + */ +#define XML_SCHEMAS_WILDCARD_COMPLETE 1 << 0 + +/** + * xmlSchemaCharValueLink: + * Used to build a list of namespaces on wildcards. + */ +typedef struct _xmlSchemaWildcardNs xmlSchemaWildcardNs; +typedef xmlSchemaWildcardNs *xmlSchemaWildcardNsPtr; +struct _xmlSchemaWildcardNs { + struct _xmlSchemaWildcardNs *next;/* the next constraint link ... */ + const xmlChar *value;/* the value */ +}; + +/** + * xmlSchemaWildcard. + * A wildcard. + */ +typedef struct _xmlSchemaWildcard xmlSchemaWildcard; +typedef xmlSchemaWildcard *xmlSchemaWildcardPtr; +struct _xmlSchemaWildcard { + xmlSchemaTypeType type; /* The kind of type */ + const xmlChar *id; /* Deprecated; not used */ + xmlSchemaAnnotPtr annot; + xmlNodePtr node; + int minOccurs; /* Deprecated; not used */ + int maxOccurs; /* Deprecated; not used */ + int processContents; + int any; /* Indicates if the ns constraint is of ##any */ + xmlSchemaWildcardNsPtr nsSet; /* The list of allowed namespaces */ + xmlSchemaWildcardNsPtr negNsSet; /* The negated namespace */ + int flags; +}; + +/** + * XML_SCHEMAS_ATTRGROUP_WILDCARD_BUILDED: + * + * The attribute wildcard has been built. + */ +#define XML_SCHEMAS_ATTRGROUP_WILDCARD_BUILDED 1 << 0 +/** + * XML_SCHEMAS_ATTRGROUP_GLOBAL: + * + * The attribute group has been defined. + */ +#define XML_SCHEMAS_ATTRGROUP_GLOBAL 1 << 1 +/** + * XML_SCHEMAS_ATTRGROUP_MARKED: + * + * Marks the attr group as marked; used for circular checks. + */ +#define XML_SCHEMAS_ATTRGROUP_MARKED 1 << 2 + +/** + * XML_SCHEMAS_ATTRGROUP_REDEFINED: + * + * The attr group was redefined. + */ +#define XML_SCHEMAS_ATTRGROUP_REDEFINED 1 << 3 +/** + * XML_SCHEMAS_ATTRGROUP_HAS_REFS: + * + * Whether this attr. group contains attr. group references. + */ +#define XML_SCHEMAS_ATTRGROUP_HAS_REFS 1 << 4 + +/** + * An attribute group definition. + * + * xmlSchemaAttribute and xmlSchemaAttributeGroup start of structures + * must be kept similar + */ +typedef struct _xmlSchemaAttributeGroup xmlSchemaAttributeGroup; +typedef xmlSchemaAttributeGroup *xmlSchemaAttributeGroupPtr; +struct _xmlSchemaAttributeGroup { + xmlSchemaTypeType type; /* The kind of type */ + struct _xmlSchemaAttribute *next;/* the next attribute if in a group ... */ + const xmlChar *name; + const xmlChar *id; + const xmlChar *ref; /* Deprecated; not used */ + const xmlChar *refNs; /* Deprecated; not used */ + xmlSchemaAnnotPtr annot; + + xmlSchemaAttributePtr attributes; /* Deprecated; not used */ + xmlNodePtr node; + int flags; + xmlSchemaWildcardPtr attributeWildcard; + const xmlChar *refPrefix; /* Deprecated; not used */ + xmlSchemaAttributeGroupPtr refItem; /* Deprecated; not used */ + const xmlChar *targetNamespace; + void *attrUses; +}; + +/** + * xmlSchemaTypeLink: + * Used to build a list of types (e.g. member types of + * simpleType with variety "union"). + */ +typedef struct _xmlSchemaTypeLink xmlSchemaTypeLink; +typedef xmlSchemaTypeLink *xmlSchemaTypeLinkPtr; +struct _xmlSchemaTypeLink { + struct _xmlSchemaTypeLink *next;/* the next type link ... */ + xmlSchemaTypePtr type;/* the linked type */ +}; + +/** + * xmlSchemaFacetLink: + * Used to build a list of facets. + */ +typedef struct _xmlSchemaFacetLink xmlSchemaFacetLink; +typedef xmlSchemaFacetLink *xmlSchemaFacetLinkPtr; +struct _xmlSchemaFacetLink { + struct _xmlSchemaFacetLink *next;/* the next facet link ... 
*/ + xmlSchemaFacetPtr facet;/* the linked facet */ +}; + +/** + * XML_SCHEMAS_TYPE_MIXED: + * + * the element content type is mixed + */ +#define XML_SCHEMAS_TYPE_MIXED 1 << 0 +/** + * XML_SCHEMAS_TYPE_DERIVATION_METHOD_EXTENSION: + * + * the simple or complex type has a derivation method of "extension". + */ +#define XML_SCHEMAS_TYPE_DERIVATION_METHOD_EXTENSION 1 << 1 +/** + * XML_SCHEMAS_TYPE_DERIVATION_METHOD_RESTRICTION: + * + * the simple or complex type has a derivation method of "restriction". + */ +#define XML_SCHEMAS_TYPE_DERIVATION_METHOD_RESTRICTION 1 << 2 +/** + * XML_SCHEMAS_TYPE_GLOBAL: + * + * the type is global + */ +#define XML_SCHEMAS_TYPE_GLOBAL 1 << 3 +/** + * XML_SCHEMAS_TYPE_OWNED_ATTR_WILDCARD: + * + * the complexType owns an attribute wildcard, i.e. + * it can be freed by the complexType + */ +#define XML_SCHEMAS_TYPE_OWNED_ATTR_WILDCARD 1 << 4 /* Obsolete. */ +/** + * XML_SCHEMAS_TYPE_VARIETY_ABSENT: + * + * the simpleType has a variety of "absent". + * TODO: Actually not necessary :-/, since if + * none of the variety flags occur then it's + * automatically absent. + */ +#define XML_SCHEMAS_TYPE_VARIETY_ABSENT 1 << 5 +/** + * XML_SCHEMAS_TYPE_VARIETY_LIST: + * + * the simpleType has a variety of "list". + */ +#define XML_SCHEMAS_TYPE_VARIETY_LIST 1 << 6 +/** + * XML_SCHEMAS_TYPE_VARIETY_UNION: + * + * the simpleType has a variety of "union". + */ +#define XML_SCHEMAS_TYPE_VARIETY_UNION 1 << 7 +/** + * XML_SCHEMAS_TYPE_VARIETY_ATOMIC: + * + * the simpleType has a variety of "union". + */ +#define XML_SCHEMAS_TYPE_VARIETY_ATOMIC 1 << 8 +/** + * XML_SCHEMAS_TYPE_FINAL_EXTENSION: + * + * the complexType has a final of "extension". + */ +#define XML_SCHEMAS_TYPE_FINAL_EXTENSION 1 << 9 +/** + * XML_SCHEMAS_TYPE_FINAL_RESTRICTION: + * + * the simpleType/complexType has a final of "restriction". + */ +#define XML_SCHEMAS_TYPE_FINAL_RESTRICTION 1 << 10 +/** + * XML_SCHEMAS_TYPE_FINAL_LIST: + * + * the simpleType has a final of "list". + */ +#define XML_SCHEMAS_TYPE_FINAL_LIST 1 << 11 +/** + * XML_SCHEMAS_TYPE_FINAL_UNION: + * + * the simpleType has a final of "union". + */ +#define XML_SCHEMAS_TYPE_FINAL_UNION 1 << 12 +/** + * XML_SCHEMAS_TYPE_FINAL_DEFAULT: + * + * the simpleType has a final of "default". + */ +#define XML_SCHEMAS_TYPE_FINAL_DEFAULT 1 << 13 +/** + * XML_SCHEMAS_TYPE_BUILTIN_PRIMITIVE: + * + * Marks the item as a builtin primitive. + */ +#define XML_SCHEMAS_TYPE_BUILTIN_PRIMITIVE 1 << 14 +/** + * XML_SCHEMAS_TYPE_MARKED: + * + * Marks the item as marked; used for circular checks. + */ +#define XML_SCHEMAS_TYPE_MARKED 1 << 16 +/** + * XML_SCHEMAS_TYPE_BLOCK_DEFAULT: + * + * the complexType did not specify 'block' so use the default of the + * item. + */ +#define XML_SCHEMAS_TYPE_BLOCK_DEFAULT 1 << 17 +/** + * XML_SCHEMAS_TYPE_BLOCK_EXTENSION: + * + * the complexType has a 'block' of "extension". + */ +#define XML_SCHEMAS_TYPE_BLOCK_EXTENSION 1 << 18 +/** + * XML_SCHEMAS_TYPE_BLOCK_RESTRICTION: + * + * the complexType has a 'block' of "restriction". + */ +#define XML_SCHEMAS_TYPE_BLOCK_RESTRICTION 1 << 19 +/** + * XML_SCHEMAS_TYPE_ABSTRACT: + * + * the simple/complexType is abstract. 
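+ * Like the other XML_SCHEMAS_TYPE_* values this is a bit flag tested
+ * against the flags field of _xmlSchemaType, for example
+ * (type->flags & XML_SCHEMAS_TYPE_ABSTRACT), where "type" is a
+ * placeholder xmlSchemaTypePtr.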
+ */ +#define XML_SCHEMAS_TYPE_ABSTRACT 1 << 20 +/** + * XML_SCHEMAS_TYPE_FACETSNEEDVALUE: + * + * indicates if the facets need a computed value + */ +#define XML_SCHEMAS_TYPE_FACETSNEEDVALUE 1 << 21 +/** + * XML_SCHEMAS_TYPE_INTERNAL_RESOLVED: + * + * indicates that the type was typefixed + */ +#define XML_SCHEMAS_TYPE_INTERNAL_RESOLVED 1 << 22 +/** + * XML_SCHEMAS_TYPE_INTERNAL_INVALID: + * + * indicates that the type is invalid + */ +#define XML_SCHEMAS_TYPE_INTERNAL_INVALID 1 << 23 +/** + * XML_SCHEMAS_TYPE_WHITESPACE_PRESERVE: + * + * a whitespace-facet value of "preserve" + */ +#define XML_SCHEMAS_TYPE_WHITESPACE_PRESERVE 1 << 24 +/** + * XML_SCHEMAS_TYPE_WHITESPACE_REPLACE: + * + * a whitespace-facet value of "replace" + */ +#define XML_SCHEMAS_TYPE_WHITESPACE_REPLACE 1 << 25 +/** + * XML_SCHEMAS_TYPE_WHITESPACE_COLLAPSE: + * + * a whitespace-facet value of "collapse" + */ +#define XML_SCHEMAS_TYPE_WHITESPACE_COLLAPSE 1 << 26 +/** + * XML_SCHEMAS_TYPE_HAS_FACETS: + * + * has facets + */ +#define XML_SCHEMAS_TYPE_HAS_FACETS 1 << 27 +/** + * XML_SCHEMAS_TYPE_NORMVALUENEEDED: + * + * indicates if the facets (pattern) need a normalized value + */ +#define XML_SCHEMAS_TYPE_NORMVALUENEEDED 1 << 28 + +/** + * XML_SCHEMAS_TYPE_FIXUP_1: + * + * First stage of fixup was done. + */ +#define XML_SCHEMAS_TYPE_FIXUP_1 1 << 29 + +/** + * XML_SCHEMAS_TYPE_REDEFINED: + * + * The type was redefined. + */ +#define XML_SCHEMAS_TYPE_REDEFINED 1 << 30 +/** + * XML_SCHEMAS_TYPE_REDEFINING: + * + * The type redefines an other type. + */ +/* #define XML_SCHEMAS_TYPE_REDEFINING 1 << 31 */ + +/** + * _xmlSchemaType: + * + * Schemas type definition. + */ +struct _xmlSchemaType { + xmlSchemaTypeType type; /* The kind of type */ + struct _xmlSchemaType *next; /* the next type if in a sequence ... */ + const xmlChar *name; + const xmlChar *id ; /* Deprecated; not used */ + const xmlChar *ref; /* Deprecated; not used */ + const xmlChar *refNs; /* Deprecated; not used */ + xmlSchemaAnnotPtr annot; + xmlSchemaTypePtr subtypes; + xmlSchemaAttributePtr attributes; /* Deprecated; not used */ + xmlNodePtr node; + int minOccurs; /* Deprecated; not used */ + int maxOccurs; /* Deprecated; not used */ + + int flags; + xmlSchemaContentType contentType; + const xmlChar *base; /* Base type's local name */ + const xmlChar *baseNs; /* Base type's target namespace */ + xmlSchemaTypePtr baseType; /* The base type component */ + xmlSchemaFacetPtr facets; /* Local facets */ + struct _xmlSchemaType *redef; /* Deprecated; not used */ + int recurse; /* Obsolete */ + xmlSchemaAttributeLinkPtr *attributeUses; /* Deprecated; not used */ + xmlSchemaWildcardPtr attributeWildcard; + int builtInType; /* Type of built-in types. */ + xmlSchemaTypeLinkPtr memberTypes; /* member-types if a union type. */ + xmlSchemaFacetLinkPtr facetSet; /* All facets (incl. inherited) */ + const xmlChar *refPrefix; /* Deprecated; not used */ + xmlSchemaTypePtr contentTypeDef; /* Used for the simple content of complex types. + Could we use @subtypes for this? */ + xmlRegexpPtr contModel; /* Holds the automaton of the content model */ + const xmlChar *targetNamespace; + void *attrUses; +}; + +/* + * xmlSchemaElement: + * An element definition. 
+ * + * xmlSchemaType, xmlSchemaFacet and xmlSchemaElement start of + * structures must be kept similar + */ +/** + * XML_SCHEMAS_ELEM_NILLABLE: + * + * the element is nillable + */ +#define XML_SCHEMAS_ELEM_NILLABLE 1 << 0 +/** + * XML_SCHEMAS_ELEM_GLOBAL: + * + * the element is global + */ +#define XML_SCHEMAS_ELEM_GLOBAL 1 << 1 +/** + * XML_SCHEMAS_ELEM_DEFAULT: + * + * the element has a default value + */ +#define XML_SCHEMAS_ELEM_DEFAULT 1 << 2 +/** + * XML_SCHEMAS_ELEM_FIXED: + * + * the element has a fixed value + */ +#define XML_SCHEMAS_ELEM_FIXED 1 << 3 +/** + * XML_SCHEMAS_ELEM_ABSTRACT: + * + * the element is abstract + */ +#define XML_SCHEMAS_ELEM_ABSTRACT 1 << 4 +/** + * XML_SCHEMAS_ELEM_TOPLEVEL: + * + * the element is top level + * obsolete: use XML_SCHEMAS_ELEM_GLOBAL instead + */ +#define XML_SCHEMAS_ELEM_TOPLEVEL 1 << 5 +/** + * XML_SCHEMAS_ELEM_REF: + * + * the element is a reference to a type + */ +#define XML_SCHEMAS_ELEM_REF 1 << 6 +/** + * XML_SCHEMAS_ELEM_NSDEFAULT: + * + * allow elements in no namespace + * Obsolete, not used anymore. + */ +#define XML_SCHEMAS_ELEM_NSDEFAULT 1 << 7 +/** + * XML_SCHEMAS_ELEM_INTERNAL_RESOLVED: + * + * this is set when "type", "ref", "substitutionGroup" + * references have been resolved. + */ +#define XML_SCHEMAS_ELEM_INTERNAL_RESOLVED 1 << 8 + /** + * XML_SCHEMAS_ELEM_CIRCULAR: + * + * a helper flag for the search of circular references. + */ +#define XML_SCHEMAS_ELEM_CIRCULAR 1 << 9 +/** + * XML_SCHEMAS_ELEM_BLOCK_ABSENT: + * + * the "block" attribute is absent + */ +#define XML_SCHEMAS_ELEM_BLOCK_ABSENT 1 << 10 +/** + * XML_SCHEMAS_ELEM_BLOCK_EXTENSION: + * + * disallowed substitutions are absent + */ +#define XML_SCHEMAS_ELEM_BLOCK_EXTENSION 1 << 11 +/** + * XML_SCHEMAS_ELEM_BLOCK_RESTRICTION: + * + * disallowed substitutions: "restriction" + */ +#define XML_SCHEMAS_ELEM_BLOCK_RESTRICTION 1 << 12 +/** + * XML_SCHEMAS_ELEM_BLOCK_SUBSTITUTION: + * + * disallowed substitutions: "substitution" + */ +#define XML_SCHEMAS_ELEM_BLOCK_SUBSTITUTION 1 << 13 +/** + * XML_SCHEMAS_ELEM_FINAL_ABSENT: + * + * substitution group exclusions are absent + */ +#define XML_SCHEMAS_ELEM_FINAL_ABSENT 1 << 14 +/** + * XML_SCHEMAS_ELEM_FINAL_EXTENSION: + * + * substitution group exclusions: "extension" + */ +#define XML_SCHEMAS_ELEM_FINAL_EXTENSION 1 << 15 +/** + * XML_SCHEMAS_ELEM_FINAL_RESTRICTION: + * + * substitution group exclusions: "restriction" + */ +#define XML_SCHEMAS_ELEM_FINAL_RESTRICTION 1 << 16 +/** + * XML_SCHEMAS_ELEM_SUBST_GROUP_HEAD: + * + * the declaration is a substitution group head + */ +#define XML_SCHEMAS_ELEM_SUBST_GROUP_HEAD 1 << 17 +/** + * XML_SCHEMAS_ELEM_INTERNAL_CHECKED: + * + * this is set when the elem decl has been checked against + * all constraints + */ +#define XML_SCHEMAS_ELEM_INTERNAL_CHECKED 1 << 18 + +typedef struct _xmlSchemaElement xmlSchemaElement; +typedef xmlSchemaElement *xmlSchemaElementPtr; +struct _xmlSchemaElement { + xmlSchemaTypeType type; /* The kind of type */ + struct _xmlSchemaType *next; /* Not used? 
*/ + const xmlChar *name; + const xmlChar *id; /* Deprecated; not used */ + const xmlChar *ref; /* Deprecated; not used */ + const xmlChar *refNs; /* Deprecated; not used */ + xmlSchemaAnnotPtr annot; + xmlSchemaTypePtr subtypes; /* the type definition */ + xmlSchemaAttributePtr attributes; + xmlNodePtr node; + int minOccurs; /* Deprecated; not used */ + int maxOccurs; /* Deprecated; not used */ + + int flags; + const xmlChar *targetNamespace; + const xmlChar *namedType; + const xmlChar *namedTypeNs; + const xmlChar *substGroup; + const xmlChar *substGroupNs; + const xmlChar *scope; + const xmlChar *value; /* The original value of the value constraint. */ + struct _xmlSchemaElement *refDecl; /* This will now be used for the + substitution group affiliation */ + xmlRegexpPtr contModel; /* Obsolete for WXS, maybe used for RelaxNG */ + xmlSchemaContentType contentType; + const xmlChar *refPrefix; /* Deprecated; not used */ + xmlSchemaValPtr defVal; /* The compiled value constraint. */ + void *idcs; /* The identity-constraint defs */ +}; + +/* + * XML_SCHEMAS_FACET_UNKNOWN: + * + * unknown facet handling + */ +#define XML_SCHEMAS_FACET_UNKNOWN 0 +/* + * XML_SCHEMAS_FACET_PRESERVE: + * + * preserve the type of the facet + */ +#define XML_SCHEMAS_FACET_PRESERVE 1 +/* + * XML_SCHEMAS_FACET_REPLACE: + * + * replace the type of the facet + */ +#define XML_SCHEMAS_FACET_REPLACE 2 +/* + * XML_SCHEMAS_FACET_COLLAPSE: + * + * collapse the types of the facet + */ +#define XML_SCHEMAS_FACET_COLLAPSE 3 +/** + * A facet definition. + */ +struct _xmlSchemaFacet { + xmlSchemaTypeType type; /* The kind of type */ + struct _xmlSchemaFacet *next;/* the next type if in a sequence ... */ + const xmlChar *value; /* The original value */ + const xmlChar *id; /* Obsolete */ + xmlSchemaAnnotPtr annot; + xmlNodePtr node; + int fixed; /* XML_SCHEMAS_FACET_PRESERVE, etc. */ + int whitespace; + xmlSchemaValPtr val; /* The compiled value */ + xmlRegexpPtr regexp; /* The regex for patterns */ +}; + +/** + * A notation definition. + */ +typedef struct _xmlSchemaNotation xmlSchemaNotation; +typedef xmlSchemaNotation *xmlSchemaNotationPtr; +struct _xmlSchemaNotation { + xmlSchemaTypeType type; /* The kind of type */ + const xmlChar *name; + xmlSchemaAnnotPtr annot; + const xmlChar *identifier; + const xmlChar *targetNamespace; +}; + +/* +* TODO: Actually all those flags used for the schema should sit +* on the schema parser context, since they are used only +* during parsing an XML schema document, and not available +* on the component level as per spec. +*/ +/** + * XML_SCHEMAS_QUALIF_ELEM: + * + * Reflects elementFormDefault == qualified in + * an XML schema document. + */ +#define XML_SCHEMAS_QUALIF_ELEM 1 << 0 +/** + * XML_SCHEMAS_QUALIF_ATTR: + * + * Reflects attributeFormDefault == qualified in + * an XML schema document. + */ +#define XML_SCHEMAS_QUALIF_ATTR 1 << 1 +/** + * XML_SCHEMAS_FINAL_DEFAULT_EXTENSION: + * + * the schema has "extension" in the set of finalDefault. + */ +#define XML_SCHEMAS_FINAL_DEFAULT_EXTENSION 1 << 2 +/** + * XML_SCHEMAS_FINAL_DEFAULT_RESTRICTION: + * + * the schema has "restriction" in the set of finalDefault. + */ +#define XML_SCHEMAS_FINAL_DEFAULT_RESTRICTION 1 << 3 +/** + * XML_SCHEMAS_FINAL_DEFAULT_LIST: + * + * the schema has "list" in the set of finalDefault. + */ +#define XML_SCHEMAS_FINAL_DEFAULT_LIST 1 << 4 +/** + * XML_SCHEMAS_FINAL_DEFAULT_UNION: + * + * the schema has "union" in the set of finalDefault. 
+ */ +#define XML_SCHEMAS_FINAL_DEFAULT_UNION 1 << 5 +/** + * XML_SCHEMAS_BLOCK_DEFAULT_EXTENSION: + * + * the schema has "extension" in the set of blockDefault. + */ +#define XML_SCHEMAS_BLOCK_DEFAULT_EXTENSION 1 << 6 +/** + * XML_SCHEMAS_BLOCK_DEFAULT_RESTRICTION: + * + * the schema has "restriction" in the set of blockDefault. + */ +#define XML_SCHEMAS_BLOCK_DEFAULT_RESTRICTION 1 << 7 +/** + * XML_SCHEMAS_BLOCK_DEFAULT_SUBSTITUTION: + * + * the schema has "substitution" in the set of blockDefault. + */ +#define XML_SCHEMAS_BLOCK_DEFAULT_SUBSTITUTION 1 << 8 +/** + * XML_SCHEMAS_INCLUDING_CONVERT_NS: + * + * the schema is currently including an other schema with + * no target namespace. + */ +#define XML_SCHEMAS_INCLUDING_CONVERT_NS 1 << 9 +/** + * _xmlSchema: + * + * A Schemas definition + */ +struct _xmlSchema { + const xmlChar *name; /* schema name */ + const xmlChar *targetNamespace; /* the target namespace */ + const xmlChar *version; + const xmlChar *id; /* Obsolete */ + xmlDocPtr doc; + xmlSchemaAnnotPtr annot; + int flags; + + xmlHashTablePtr typeDecl; + xmlHashTablePtr attrDecl; + xmlHashTablePtr attrgrpDecl; + xmlHashTablePtr elemDecl; + xmlHashTablePtr notaDecl; + + xmlHashTablePtr schemasImports; + + void *_private; /* unused by the library for users or bindings */ + xmlHashTablePtr groupDecl; + xmlDictPtr dict; + void *includes; /* the includes, this is opaque for now */ + int preserve; /* whether to free the document */ + int counter; /* used to give anonymous components unique names */ + xmlHashTablePtr idcDef; /* All identity-constraint defs. */ + void *volatiles; /* Obsolete */ +}; + +XMLPUBFUN void xmlSchemaFreeType (xmlSchemaTypePtr type); +XMLPUBFUN void xmlSchemaFreeWildcard(xmlSchemaWildcardPtr wildcard); + +#ifdef __cplusplus +} +#endif + +#endif /* LIBXML_SCHEMAS_ENABLED */ +#endif /* __XML_SCHEMA_INTERNALS_H__ */ diff --git a/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/include/libxml2/libxml/schematron.h b/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/include/libxml2/libxml/schematron.h new file mode 100644 index 000000000..8dd8d25c4 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/include/libxml2/libxml/schematron.h @@ -0,0 +1,143 @@ +/* + * Summary: XML Schematron implementation + * Description: interface to the XML Schematron validity checking. + * + * Copy: See Copyright for the status of this software. 
+ * + * Author: Daniel Veillard + */ + + +#ifndef __XML_SCHEMATRON_H__ +#define __XML_SCHEMATRON_H__ + +#include + +#ifdef LIBXML_SCHEMATRON_ENABLED + +#include +#include + +#ifdef __cplusplus +extern "C" { +#endif + +typedef enum { + XML_SCHEMATRON_OUT_QUIET = 1 << 0, /* quiet no report */ + XML_SCHEMATRON_OUT_TEXT = 1 << 1, /* build a textual report */ + XML_SCHEMATRON_OUT_XML = 1 << 2, /* output SVRL */ + XML_SCHEMATRON_OUT_ERROR = 1 << 3, /* output via xmlStructuredErrorFunc */ + XML_SCHEMATRON_OUT_FILE = 1 << 8, /* output to a file descriptor */ + XML_SCHEMATRON_OUT_BUFFER = 1 << 9, /* output to a buffer */ + XML_SCHEMATRON_OUT_IO = 1 << 10 /* output to I/O mechanism */ +} xmlSchematronValidOptions; + +/** + * The schemas related types are kept internal + */ +typedef struct _xmlSchematron xmlSchematron; +typedef xmlSchematron *xmlSchematronPtr; + +/** + * xmlSchematronValidityErrorFunc: + * @ctx: the validation context + * @msg: the message + * @...: extra arguments + * + * Signature of an error callback from a Schematron validation + */ +typedef void (*xmlSchematronValidityErrorFunc) (void *ctx, const char *msg, ...); + +/** + * xmlSchematronValidityWarningFunc: + * @ctx: the validation context + * @msg: the message + * @...: extra arguments + * + * Signature of a warning callback from a Schematron validation + */ +typedef void (*xmlSchematronValidityWarningFunc) (void *ctx, const char *msg, ...); + +/** + * A schemas validation context + */ +typedef struct _xmlSchematronParserCtxt xmlSchematronParserCtxt; +typedef xmlSchematronParserCtxt *xmlSchematronParserCtxtPtr; + +typedef struct _xmlSchematronValidCtxt xmlSchematronValidCtxt; +typedef xmlSchematronValidCtxt *xmlSchematronValidCtxtPtr; + +/* + * Interfaces for parsing. + */ +XMLPUBFUN xmlSchematronParserCtxtPtr + xmlSchematronNewParserCtxt (const char *URL); +XMLPUBFUN xmlSchematronParserCtxtPtr + xmlSchematronNewMemParserCtxt(const char *buffer, + int size); +XMLPUBFUN xmlSchematronParserCtxtPtr + xmlSchematronNewDocParserCtxt(xmlDocPtr doc); +XMLPUBFUN void + xmlSchematronFreeParserCtxt (xmlSchematronParserCtxtPtr ctxt); +/***** +XMLPUBFUN void + xmlSchematronSetParserErrors(xmlSchematronParserCtxtPtr ctxt, + xmlSchematronValidityErrorFunc err, + xmlSchematronValidityWarningFunc warn, + void *ctx); +XMLPUBFUN int + xmlSchematronGetParserErrors(xmlSchematronParserCtxtPtr ctxt, + xmlSchematronValidityErrorFunc * err, + xmlSchematronValidityWarningFunc * warn, + void **ctx); +XMLPUBFUN int + xmlSchematronIsValid (xmlSchematronValidCtxtPtr ctxt); + *****/ +XMLPUBFUN xmlSchematronPtr + xmlSchematronParse (xmlSchematronParserCtxtPtr ctxt); +XMLPUBFUN void + xmlSchematronFree (xmlSchematronPtr schema); +/* + * Interfaces for validating + */ +XMLPUBFUN void + xmlSchematronSetValidStructuredErrors( + xmlSchematronValidCtxtPtr ctxt, + xmlStructuredErrorFunc serror, + void *ctx); +/****** +XMLPUBFUN void + xmlSchematronSetValidErrors (xmlSchematronValidCtxtPtr ctxt, + xmlSchematronValidityErrorFunc err, + xmlSchematronValidityWarningFunc warn, + void *ctx); +XMLPUBFUN int + xmlSchematronGetValidErrors (xmlSchematronValidCtxtPtr ctxt, + xmlSchematronValidityErrorFunc *err, + xmlSchematronValidityWarningFunc *warn, + void **ctx); +XMLPUBFUN int + xmlSchematronSetValidOptions(xmlSchematronValidCtxtPtr ctxt, + int options); +XMLPUBFUN int + xmlSchematronValidCtxtGetOptions(xmlSchematronValidCtxtPtr ctxt); +XMLPUBFUN int + xmlSchematronValidateOneElement (xmlSchematronValidCtxtPtr ctxt, + xmlNodePtr elem); + *******/ + +XMLPUBFUN 
xmlSchematronValidCtxtPtr + xmlSchematronNewValidCtxt (xmlSchematronPtr schema, + int options); +XMLPUBFUN void + xmlSchematronFreeValidCtxt (xmlSchematronValidCtxtPtr ctxt); +XMLPUBFUN int + xmlSchematronValidateDoc (xmlSchematronValidCtxtPtr ctxt, + xmlDocPtr instance); + +#ifdef __cplusplus +} +#endif + +#endif /* LIBXML_SCHEMATRON_ENABLED */ +#endif /* __XML_SCHEMATRON_H__ */ diff --git a/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/include/libxml2/libxml/threads.h b/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/include/libxml2/libxml/threads.h new file mode 100644 index 000000000..8f4b6e174 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/include/libxml2/libxml/threads.h @@ -0,0 +1,87 @@ +/** + * Summary: interfaces for thread handling + * Description: set of generic threading related routines + * should work with pthreads, Windows native or TLS threads + * + * Copy: See Copyright for the status of this software. + * + * Author: Daniel Veillard + */ + +#ifndef __XML_THREADS_H__ +#define __XML_THREADS_H__ + +#include + +#ifdef __cplusplus +extern "C" { +#endif + +/* + * xmlMutex are a simple mutual exception locks. + */ +typedef struct _xmlMutex xmlMutex; +typedef xmlMutex *xmlMutexPtr; + +/* + * xmlRMutex are reentrant mutual exception locks. + */ +typedef struct _xmlRMutex xmlRMutex; +typedef xmlRMutex *xmlRMutexPtr; + +XMLPUBFUN int + xmlCheckThreadLocalStorage(void); + +XMLPUBFUN xmlMutexPtr + xmlNewMutex (void); +XMLPUBFUN void + xmlMutexLock (xmlMutexPtr tok); +XMLPUBFUN void + xmlMutexUnlock (xmlMutexPtr tok); +XMLPUBFUN void + xmlFreeMutex (xmlMutexPtr tok); + +XMLPUBFUN xmlRMutexPtr + xmlNewRMutex (void); +XMLPUBFUN void + xmlRMutexLock (xmlRMutexPtr tok); +XMLPUBFUN void + xmlRMutexUnlock (xmlRMutexPtr tok); +XMLPUBFUN void + xmlFreeRMutex (xmlRMutexPtr tok); + +/* + * Library wide APIs. + */ +XML_DEPRECATED +XMLPUBFUN void + xmlInitThreads (void); +XMLPUBFUN void + xmlLockLibrary (void); +XMLPUBFUN void + xmlUnlockLibrary(void); +XML_DEPRECATED +XMLPUBFUN int + xmlGetThreadId (void); +XML_DEPRECATED +XMLPUBFUN int + xmlIsMainThread (void); +XML_DEPRECATED +XMLPUBFUN void + xmlCleanupThreads(void); + +/** DOC_DISABLE */ +#if defined(LIBXML_THREAD_ENABLED) && defined(_WIN32) && \ + defined(LIBXML_STATIC_FOR_DLL) +int +xmlDllMain(void *hinstDLL, unsigned long fdwReason, + void *lpvReserved); +#endif +/** DOC_ENABLE */ + +#ifdef __cplusplus +} +#endif + + +#endif /* __XML_THREADS_H__ */ diff --git a/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/include/libxml2/libxml/tree.h b/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/include/libxml2/libxml/tree.h new file mode 100644 index 000000000..4070375b9 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/include/libxml2/libxml/tree.h @@ -0,0 +1,1382 @@ +/* + * Summary: interfaces for tree manipulation + * Description: this module describes the structures found in an tree resulting + * from an XML or HTML parsing, as well as the API provided for + * various processing on that tree + * + * Copy: See Copyright for the status of this software. 
+ * + * Author: Daniel Veillard + */ + +#ifndef XML_TREE_INTERNALS + +/* + * Emulate circular dependency for backward compatibility + */ +#include + +#else /* XML_TREE_INTERNALS */ + +#ifndef __XML_TREE_H__ +#define __XML_TREE_H__ + +#include +#include +#include +#include +#include +#include + +#ifdef __cplusplus +extern "C" { +#endif + +/* + * Some of the basic types pointer to structures: + */ +/* xmlIO.h */ +typedef struct _xmlParserInputBuffer xmlParserInputBuffer; +typedef xmlParserInputBuffer *xmlParserInputBufferPtr; + +typedef struct _xmlOutputBuffer xmlOutputBuffer; +typedef xmlOutputBuffer *xmlOutputBufferPtr; + +/* parser.h */ +typedef struct _xmlParserInput xmlParserInput; +typedef xmlParserInput *xmlParserInputPtr; + +typedef struct _xmlParserCtxt xmlParserCtxt; +typedef xmlParserCtxt *xmlParserCtxtPtr; + +typedef struct _xmlSAXLocator xmlSAXLocator; +typedef xmlSAXLocator *xmlSAXLocatorPtr; + +typedef struct _xmlSAXHandler xmlSAXHandler; +typedef xmlSAXHandler *xmlSAXHandlerPtr; + +/* entities.h */ +typedef struct _xmlEntity xmlEntity; +typedef xmlEntity *xmlEntityPtr; + +/** + * BASE_BUFFER_SIZE: + * + * default buffer size 4000. + */ +#define BASE_BUFFER_SIZE 4096 + +/** + * LIBXML_NAMESPACE_DICT: + * + * Defines experimental behaviour: + * 1) xmlNs gets an additional field @context (a xmlDoc) + * 2) when creating a tree, xmlNs->href is stored in the dict of xmlDoc. + */ +/* #define LIBXML_NAMESPACE_DICT */ + +/** + * xmlBufferAllocationScheme: + * + * A buffer allocation scheme can be defined to either match exactly the + * need or double it's allocated size each time it is found too small. + */ + +typedef enum { + XML_BUFFER_ALLOC_DOUBLEIT, /* double each time one need to grow */ + XML_BUFFER_ALLOC_EXACT, /* grow only to the minimal size */ + XML_BUFFER_ALLOC_IMMUTABLE, /* immutable buffer, deprecated */ + XML_BUFFER_ALLOC_IO, /* special allocation scheme used for I/O */ + XML_BUFFER_ALLOC_HYBRID, /* exact up to a threshold, and doubleit thereafter */ + XML_BUFFER_ALLOC_BOUNDED /* limit the upper size of the buffer */ +} xmlBufferAllocationScheme; + +/** + * xmlBuffer: + * + * A buffer structure, this old construct is limited to 2GB and + * is being deprecated, use API with xmlBuf instead + */ +typedef struct _xmlBuffer xmlBuffer; +typedef xmlBuffer *xmlBufferPtr; +struct _xmlBuffer { + xmlChar *content; /* The buffer content UTF8 */ + unsigned int use; /* The buffer size used */ + unsigned int size; /* The buffer size */ + xmlBufferAllocationScheme alloc; /* The realloc method */ + xmlChar *contentIO; /* in IO mode we may have a different base */ +}; + +/** + * xmlBuf: + * + * A buffer structure, new one, the actual structure internals are not public + */ + +typedef struct _xmlBuf xmlBuf; + +/** + * xmlBufPtr: + * + * A pointer to a buffer structure, the actual structure internals are not + * public + */ + +typedef xmlBuf *xmlBufPtr; + +/* + * A few public routines for xmlBuf. As those are expected to be used + * mostly internally the bulk of the routines are internal in buf.h + */ +XMLPUBFUN xmlChar* xmlBufContent (const xmlBuf* buf); +XMLPUBFUN xmlChar* xmlBufEnd (xmlBufPtr buf); +XMLPUBFUN size_t xmlBufUse (const xmlBufPtr buf); +XMLPUBFUN size_t xmlBufShrink (xmlBufPtr buf, size_t len); + +/* + * LIBXML2_NEW_BUFFER: + * + * Macro used to express that the API use the new buffers for + * xmlParserInputBuffer and xmlOutputBuffer. The change was + * introduced in 2.9.0. 
+ */ +#define LIBXML2_NEW_BUFFER + +/** + * XML_XML_NAMESPACE: + * + * This is the namespace for the special xml: prefix predefined in the + * XML Namespace specification. + */ +#define XML_XML_NAMESPACE \ + (const xmlChar *) "http://www.w3.org/XML/1998/namespace" + +/** + * XML_XML_ID: + * + * This is the name for the special xml:id attribute + */ +#define XML_XML_ID (const xmlChar *) "xml:id" + +/* + * The different element types carried by an XML tree. + * + * NOTE: This is synchronized with DOM Level1 values + * See http://www.w3.org/TR/REC-DOM-Level-1/ + * + * Actually this had diverged a bit, and now XML_DOCUMENT_TYPE_NODE should + * be deprecated to use an XML_DTD_NODE. + */ +typedef enum { + XML_ELEMENT_NODE= 1, + XML_ATTRIBUTE_NODE= 2, + XML_TEXT_NODE= 3, + XML_CDATA_SECTION_NODE= 4, + XML_ENTITY_REF_NODE= 5, + XML_ENTITY_NODE= 6, /* unused */ + XML_PI_NODE= 7, + XML_COMMENT_NODE= 8, + XML_DOCUMENT_NODE= 9, + XML_DOCUMENT_TYPE_NODE= 10, /* unused */ + XML_DOCUMENT_FRAG_NODE= 11, + XML_NOTATION_NODE= 12, /* unused */ + XML_HTML_DOCUMENT_NODE= 13, + XML_DTD_NODE= 14, + XML_ELEMENT_DECL= 15, + XML_ATTRIBUTE_DECL= 16, + XML_ENTITY_DECL= 17, + XML_NAMESPACE_DECL= 18, + XML_XINCLUDE_START= 19, + XML_XINCLUDE_END= 20 + /* XML_DOCB_DOCUMENT_NODE= 21 */ /* removed */ +} xmlElementType; + +/** DOC_DISABLE */ +/* For backward compatibility */ +#define XML_DOCB_DOCUMENT_NODE 21 +/** DOC_ENABLE */ + +/** + * xmlNotation: + * + * A DTD Notation definition. + */ + +typedef struct _xmlNotation xmlNotation; +typedef xmlNotation *xmlNotationPtr; +struct _xmlNotation { + const xmlChar *name; /* Notation name */ + const xmlChar *PublicID; /* Public identifier, if any */ + const xmlChar *SystemID; /* System identifier, if any */ +}; + +/** + * xmlAttributeType: + * + * A DTD Attribute type definition. + */ + +typedef enum { + XML_ATTRIBUTE_CDATA = 1, + XML_ATTRIBUTE_ID, + XML_ATTRIBUTE_IDREF , + XML_ATTRIBUTE_IDREFS, + XML_ATTRIBUTE_ENTITY, + XML_ATTRIBUTE_ENTITIES, + XML_ATTRIBUTE_NMTOKEN, + XML_ATTRIBUTE_NMTOKENS, + XML_ATTRIBUTE_ENUMERATION, + XML_ATTRIBUTE_NOTATION +} xmlAttributeType; + +/** + * xmlAttributeDefault: + * + * A DTD Attribute default definition. + */ + +typedef enum { + XML_ATTRIBUTE_NONE = 1, + XML_ATTRIBUTE_REQUIRED, + XML_ATTRIBUTE_IMPLIED, + XML_ATTRIBUTE_FIXED +} xmlAttributeDefault; + +/** + * xmlEnumeration: + * + * List structure used when there is an enumeration in DTDs. + */ + +typedef struct _xmlEnumeration xmlEnumeration; +typedef xmlEnumeration *xmlEnumerationPtr; +struct _xmlEnumeration { + struct _xmlEnumeration *next; /* next one */ + const xmlChar *name; /* Enumeration name */ +}; + +/** + * xmlAttribute: + * + * An Attribute declaration in a DTD. + */ + +typedef struct _xmlAttribute xmlAttribute; +typedef xmlAttribute *xmlAttributePtr; +struct _xmlAttribute { + void *_private; /* application data */ + xmlElementType type; /* XML_ATTRIBUTE_DECL, must be second ! 
*/ + const xmlChar *name; /* Attribute name */ + struct _xmlNode *children; /* NULL */ + struct _xmlNode *last; /* NULL */ + struct _xmlDtd *parent; /* -> DTD */ + struct _xmlNode *next; /* next sibling link */ + struct _xmlNode *prev; /* previous sibling link */ + struct _xmlDoc *doc; /* the containing document */ + + struct _xmlAttribute *nexth; /* next in hash table */ + xmlAttributeType atype; /* The attribute type */ + xmlAttributeDefault def; /* the default */ + const xmlChar *defaultValue; /* or the default value */ + xmlEnumerationPtr tree; /* or the enumeration tree if any */ + const xmlChar *prefix; /* the namespace prefix if any */ + const xmlChar *elem; /* Element holding the attribute */ +}; + +/** + * xmlElementContentType: + * + * Possible definitions of element content types. + */ +typedef enum { + XML_ELEMENT_CONTENT_PCDATA = 1, + XML_ELEMENT_CONTENT_ELEMENT, + XML_ELEMENT_CONTENT_SEQ, + XML_ELEMENT_CONTENT_OR +} xmlElementContentType; + +/** + * xmlElementContentOccur: + * + * Possible definitions of element content occurrences. + */ +typedef enum { + XML_ELEMENT_CONTENT_ONCE = 1, + XML_ELEMENT_CONTENT_OPT, + XML_ELEMENT_CONTENT_MULT, + XML_ELEMENT_CONTENT_PLUS +} xmlElementContentOccur; + +/** + * xmlElementContent: + * + * An XML Element content as stored after parsing an element definition + * in a DTD. + */ + +typedef struct _xmlElementContent xmlElementContent; +typedef xmlElementContent *xmlElementContentPtr; +struct _xmlElementContent { + xmlElementContentType type; /* PCDATA, ELEMENT, SEQ or OR */ + xmlElementContentOccur ocur; /* ONCE, OPT, MULT or PLUS */ + const xmlChar *name; /* Element name */ + struct _xmlElementContent *c1; /* first child */ + struct _xmlElementContent *c2; /* second child */ + struct _xmlElementContent *parent; /* parent */ + const xmlChar *prefix; /* Namespace prefix */ +}; + +/** + * xmlElementTypeVal: + * + * The different possibilities for an element content type. + */ + +typedef enum { + XML_ELEMENT_TYPE_UNDEFINED = 0, + XML_ELEMENT_TYPE_EMPTY = 1, + XML_ELEMENT_TYPE_ANY, + XML_ELEMENT_TYPE_MIXED, + XML_ELEMENT_TYPE_ELEMENT +} xmlElementTypeVal; + +/** + * xmlElement: + * + * An XML Element declaration from a DTD. + */ + +typedef struct _xmlElement xmlElement; +typedef xmlElement *xmlElementPtr; +struct _xmlElement { + void *_private; /* application data */ + xmlElementType type; /* XML_ELEMENT_DECL, must be second ! */ + const xmlChar *name; /* Element name */ + struct _xmlNode *children; /* NULL */ + struct _xmlNode *last; /* NULL */ + struct _xmlDtd *parent; /* -> DTD */ + struct _xmlNode *next; /* next sibling link */ + struct _xmlNode *prev; /* previous sibling link */ + struct _xmlDoc *doc; /* the containing document */ + + xmlElementTypeVal etype; /* The type */ + xmlElementContentPtr content; /* the allowed element content */ + xmlAttributePtr attributes; /* List of the declared attributes */ + const xmlChar *prefix; /* the namespace prefix if any */ +#ifdef LIBXML_REGEXP_ENABLED + xmlRegexpPtr contModel; /* the validating regexp */ +#else + void *contModel; +#endif +}; + + +/** + * XML_LOCAL_NAMESPACE: + * + * A namespace declaration node. + */ +#define XML_LOCAL_NAMESPACE XML_NAMESPACE_DECL +typedef xmlElementType xmlNsType; + +/** + * xmlNs: + * + * An XML namespace. + * Note that prefix == NULL is valid, it defines the default namespace + * within the subtree (until overridden). + * + * xmlNsType is unified with xmlElementType. 
+ */ + +typedef struct _xmlNs xmlNs; +typedef xmlNs *xmlNsPtr; +struct _xmlNs { + struct _xmlNs *next; /* next Ns link for this node */ + xmlNsType type; /* global or local */ + const xmlChar *href; /* URL for the namespace */ + const xmlChar *prefix; /* prefix for the namespace */ + void *_private; /* application data */ + struct _xmlDoc *context; /* normally an xmlDoc */ +}; + +/** + * xmlDtd: + * + * An XML DTD, as defined by parent link */ + struct _xmlNode *next; /* next sibling link */ + struct _xmlNode *prev; /* previous sibling link */ + struct _xmlDoc *doc; /* the containing document */ + + /* End of common part */ + void *notations; /* Hash table for notations if any */ + void *elements; /* Hash table for elements if any */ + void *attributes; /* Hash table for attributes if any */ + void *entities; /* Hash table for entities if any */ + const xmlChar *ExternalID; /* External identifier for PUBLIC DTD */ + const xmlChar *SystemID; /* URI for a SYSTEM or PUBLIC DTD */ + void *pentities; /* Hash table for param entities if any */ +}; + +/** + * xmlAttr: + * + * An attribute on an XML node. + */ +typedef struct _xmlAttr xmlAttr; +typedef xmlAttr *xmlAttrPtr; +struct _xmlAttr { + void *_private; /* application data */ + xmlElementType type; /* XML_ATTRIBUTE_NODE, must be second ! */ + const xmlChar *name; /* the name of the property */ + struct _xmlNode *children; /* the value of the property */ + struct _xmlNode *last; /* NULL */ + struct _xmlNode *parent; /* child->parent link */ + struct _xmlAttr *next; /* next sibling link */ + struct _xmlAttr *prev; /* previous sibling link */ + struct _xmlDoc *doc; /* the containing document */ + xmlNs *ns; /* pointer to the associated namespace */ + xmlAttributeType atype; /* the attribute type if validating */ + void *psvi; /* for type/PSVI information */ + struct _xmlID *id; /* the ID struct */ +}; + +/** + * xmlID: + * + * An XML ID instance. + */ + +typedef struct _xmlID xmlID; +typedef xmlID *xmlIDPtr; +struct _xmlID { + struct _xmlID *next; /* next ID */ + const xmlChar *value; /* The ID name */ + xmlAttrPtr attr; /* The attribute holding it */ + const xmlChar *name; /* The attribute if attr is not available */ + int lineno; /* The line number if attr is not available */ + struct _xmlDoc *doc; /* The document holding the ID */ +}; + +/** + * xmlRef: + * + * An XML IDREF instance. + */ + +typedef struct _xmlRef xmlRef; +typedef xmlRef *xmlRefPtr; +struct _xmlRef { + struct _xmlRef *next; /* next Ref */ + const xmlChar *value; /* The Ref name */ + xmlAttrPtr attr; /* The attribute holding it */ + const xmlChar *name; /* The attribute if attr is not available */ + int lineno; /* The line number if attr is not available */ +}; + +/** + * xmlNode: + * + * A node in an XML tree. + */ +typedef struct _xmlNode xmlNode; +typedef xmlNode *xmlNodePtr; +struct _xmlNode { + void *_private; /* application data */ + xmlElementType type; /* type number, must be second ! 
*/ + const xmlChar *name; /* the name of the node, or the entity */ + struct _xmlNode *children; /* parent->childs link */ + struct _xmlNode *last; /* last child link */ + struct _xmlNode *parent; /* child->parent link */ + struct _xmlNode *next; /* next sibling link */ + struct _xmlNode *prev; /* previous sibling link */ + struct _xmlDoc *doc; /* the containing document */ + + /* End of common part */ + xmlNs *ns; /* pointer to the associated namespace */ + xmlChar *content; /* the content */ + struct _xmlAttr *properties;/* properties list */ + xmlNs *nsDef; /* namespace definitions on this node */ + void *psvi; /* for type/PSVI information */ + unsigned short line; /* line number */ + unsigned short extra; /* extra data for XPath/XSLT */ +}; + +/** + * XML_GET_CONTENT: + * + * Macro to extract the content pointer of a node. + */ +#define XML_GET_CONTENT(n) \ + ((n)->type == XML_ELEMENT_NODE ? NULL : (n)->content) + +/** + * XML_GET_LINE: + * + * Macro to extract the line number of an element node. + */ +#define XML_GET_LINE(n) \ + (xmlGetLineNo(n)) + +/** + * xmlDocProperty + * + * Set of properties of the document as found by the parser + * Some of them are linked to similarly named xmlParserOption + */ +typedef enum { + XML_DOC_WELLFORMED = 1<<0, /* document is XML well formed */ + XML_DOC_NSVALID = 1<<1, /* document is Namespace valid */ + XML_DOC_OLD10 = 1<<2, /* parsed with old XML-1.0 parser */ + XML_DOC_DTDVALID = 1<<3, /* DTD validation was successful */ + XML_DOC_XINCLUDE = 1<<4, /* XInclude substitution was done */ + XML_DOC_USERBUILT = 1<<5, /* Document was built using the API + and not by parsing an instance */ + XML_DOC_INTERNAL = 1<<6, /* built for internal processing */ + XML_DOC_HTML = 1<<7 /* parsed or built HTML document */ +} xmlDocProperties; + +/** + * xmlDoc: + * + * An XML document. + */ +typedef struct _xmlDoc xmlDoc; +typedef xmlDoc *xmlDocPtr; +struct _xmlDoc { + void *_private; /* application data */ + xmlElementType type; /* XML_DOCUMENT_NODE, must be second ! 
*/ + char *name; /* name/filename/URI of the document */ + struct _xmlNode *children; /* the document tree */ + struct _xmlNode *last; /* last child link */ + struct _xmlNode *parent; /* child->parent link */ + struct _xmlNode *next; /* next sibling link */ + struct _xmlNode *prev; /* previous sibling link */ + struct _xmlDoc *doc; /* autoreference to itself */ + + /* End of common part */ + int compression;/* level of zlib compression */ + int standalone; /* standalone document (no external refs) + 1 if standalone="yes" + 0 if standalone="no" + -1 if there is no XML declaration + -2 if there is an XML declaration, but no + standalone attribute was specified */ + struct _xmlDtd *intSubset; /* the document internal subset */ + struct _xmlDtd *extSubset; /* the document external subset */ + struct _xmlNs *oldNs; /* Global namespace, the old way */ + const xmlChar *version; /* the XML version string */ + const xmlChar *encoding; /* actual encoding, if any */ + void *ids; /* Hash table for ID attributes if any */ + void *refs; /* Hash table for IDREFs attributes if any */ + const xmlChar *URL; /* The URI for that document */ + int charset; /* unused */ + struct _xmlDict *dict; /* dict used to allocate names or NULL */ + void *psvi; /* for type/PSVI information */ + int parseFlags; /* set of xmlParserOption used to parse the + document */ + int properties; /* set of xmlDocProperties for this document + set at the end of parsing */ +}; + + +typedef struct _xmlDOMWrapCtxt xmlDOMWrapCtxt; +typedef xmlDOMWrapCtxt *xmlDOMWrapCtxtPtr; + +/** + * xmlDOMWrapAcquireNsFunction: + * @ctxt: a DOM wrapper context + * @node: the context node (element or attribute) + * @nsName: the requested namespace name + * @nsPrefix: the requested namespace prefix + * + * A function called to acquire namespaces (xmlNs) from the wrapper. + * + * Returns an xmlNsPtr or NULL in case of an error. + */ +typedef xmlNsPtr (*xmlDOMWrapAcquireNsFunction) (xmlDOMWrapCtxtPtr ctxt, + xmlNodePtr node, + const xmlChar *nsName, + const xmlChar *nsPrefix); + +/** + * xmlDOMWrapCtxt: + * + * Context for DOM wrapper-operations. + */ +struct _xmlDOMWrapCtxt { + void * _private; + /* + * The type of this context, just in case we need specialized + * contexts in the future. + */ + int type; + /* + * Internal namespace map used for various operations. + */ + void * namespaceMap; + /* + * Use this one to acquire an xmlNsPtr intended for node->ns. + * (Note that this is not intended for elem->nsDef). + */ + xmlDOMWrapAcquireNsFunction getNsForNodeFunc; +}; + +/** + * xmlRegisterNodeFunc: + * @node: the current node + * + * Signature for the registration callback of a created node + */ +typedef void (*xmlRegisterNodeFunc) (xmlNodePtr node); + +/** + * xmlDeregisterNodeFunc: + * @node: the current node + * + * Signature for the deregistration callback of a discarded node + */ +typedef void (*xmlDeregisterNodeFunc) (xmlNodePtr node); + +/** + * xmlChildrenNode: + * + * Macro for compatibility naming layer with libxml1. Maps + * to "children." + */ +#ifndef xmlChildrenNode +#define xmlChildrenNode children +#endif + +/** + * xmlRootNode: + * + * Macro for compatibility naming layer with libxml1. Maps + * to "children". + */ +#ifndef xmlRootNode +#define xmlRootNode children +#endif + +/* + * Variables. 
+ */ + +/** DOC_DISABLE */ +#define XML_GLOBALS_TREE \ + XML_OP(xmlBufferAllocScheme, xmlBufferAllocationScheme, XML_DEPRECATED) \ + XML_OP(xmlDefaultBufferSize, int, XML_DEPRECATED) \ + XML_OP(xmlRegisterNodeDefaultValue, xmlRegisterNodeFunc, XML_DEPRECATED) \ + XML_OP(xmlDeregisterNodeDefaultValue, xmlDeregisterNodeFunc, \ + XML_DEPRECATED) + +#define XML_OP XML_DECLARE_GLOBAL +XML_GLOBALS_TREE +#undef XML_OP + +#if defined(LIBXML_THREAD_ENABLED) && !defined(XML_GLOBALS_NO_REDEFINITION) + #define xmlBufferAllocScheme XML_GLOBAL_MACRO(xmlBufferAllocScheme) + #define xmlDefaultBufferSize XML_GLOBAL_MACRO(xmlDefaultBufferSize) + #define xmlRegisterNodeDefaultValue \ + XML_GLOBAL_MACRO(xmlRegisterNodeDefaultValue) + #define xmlDeregisterNodeDefaultValue \ + XML_GLOBAL_MACRO(xmlDeregisterNodeDefaultValue) +#endif +/** DOC_ENABLE */ + +/* + * Some helper functions + */ +XMLPUBFUN int + xmlValidateNCName (const xmlChar *value, + int space); + +#if defined(LIBXML_TREE_ENABLED) || defined(LIBXML_SCHEMAS_ENABLED) +XMLPUBFUN int + xmlValidateQName (const xmlChar *value, + int space); +XMLPUBFUN int + xmlValidateName (const xmlChar *value, + int space); +XMLPUBFUN int + xmlValidateNMToken (const xmlChar *value, + int space); +#endif + +XMLPUBFUN xmlChar * + xmlBuildQName (const xmlChar *ncname, + const xmlChar *prefix, + xmlChar *memory, + int len); +XMLPUBFUN xmlChar * + xmlSplitQName2 (const xmlChar *name, + xmlChar **prefix); +XMLPUBFUN const xmlChar * + xmlSplitQName3 (const xmlChar *name, + int *len); + +/* + * Handling Buffers, the old ones see @xmlBuf for the new ones. + */ + +XMLPUBFUN void + xmlSetBufferAllocationScheme(xmlBufferAllocationScheme scheme); +XMLPUBFUN xmlBufferAllocationScheme + xmlGetBufferAllocationScheme(void); + +XMLPUBFUN xmlBufferPtr + xmlBufferCreate (void); +XMLPUBFUN xmlBufferPtr + xmlBufferCreateSize (size_t size); +XMLPUBFUN xmlBufferPtr + xmlBufferCreateStatic (void *mem, + size_t size); +XMLPUBFUN int + xmlBufferResize (xmlBufferPtr buf, + unsigned int size); +XMLPUBFUN void + xmlBufferFree (xmlBufferPtr buf); +XMLPUBFUN int + xmlBufferDump (FILE *file, + xmlBufferPtr buf); +XMLPUBFUN int + xmlBufferAdd (xmlBufferPtr buf, + const xmlChar *str, + int len); +XMLPUBFUN int + xmlBufferAddHead (xmlBufferPtr buf, + const xmlChar *str, + int len); +XMLPUBFUN int + xmlBufferCat (xmlBufferPtr buf, + const xmlChar *str); +XMLPUBFUN int + xmlBufferCCat (xmlBufferPtr buf, + const char *str); +XMLPUBFUN int + xmlBufferShrink (xmlBufferPtr buf, + unsigned int len); +XMLPUBFUN int + xmlBufferGrow (xmlBufferPtr buf, + unsigned int len); +XMLPUBFUN void + xmlBufferEmpty (xmlBufferPtr buf); +XMLPUBFUN const xmlChar* + xmlBufferContent (const xmlBuffer *buf); +XMLPUBFUN xmlChar* + xmlBufferDetach (xmlBufferPtr buf); +XMLPUBFUN void + xmlBufferSetAllocationScheme(xmlBufferPtr buf, + xmlBufferAllocationScheme scheme); +XMLPUBFUN int + xmlBufferLength (const xmlBuffer *buf); + +/* + * Creating/freeing new structures. 
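 As a purely illustrative sketch of how the creation and freeing entry points declared after this comment pair up (the element name "root", the example namespace URI, the "ex" prefix and the "root.dtd" system identifier are all invented), a document is usually built once and then released with a single xmlFreeDoc() call:

     #include <libxml/tree.h>

     int main(void) {
         xmlDocPtr doc = xmlNewDoc(BAD_CAST "1.0");                    // empty document
         xmlNodePtr root = xmlNewDocNode(doc, NULL, BAD_CAST "root", NULL);
         xmlDocSetRootElement(doc, root);                              // declared further below
         xmlNsPtr ns = xmlNewNs(root, BAD_CAST "http://example.org/ns", BAD_CAST "ex");
         xmlSetNs(root, ns);                                           // use the namespace on the root
         xmlCreateIntSubset(doc, BAD_CAST "root", NULL, BAD_CAST "root.dtd");
         xmlFreeDoc(doc);                                              // frees the tree, internal subset and namespaces
         return 0;
     }

 Such a program is typically compiled with the flags reported by xml2-config --cflags --libs.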
+ */ +XMLPUBFUN xmlDtdPtr + xmlCreateIntSubset (xmlDocPtr doc, + const xmlChar *name, + const xmlChar *ExternalID, + const xmlChar *SystemID); +XMLPUBFUN xmlDtdPtr + xmlNewDtd (xmlDocPtr doc, + const xmlChar *name, + const xmlChar *ExternalID, + const xmlChar *SystemID); +XMLPUBFUN xmlDtdPtr + xmlGetIntSubset (const xmlDoc *doc); +XMLPUBFUN void + xmlFreeDtd (xmlDtdPtr cur); +#ifdef LIBXML_LEGACY_ENABLED +XML_DEPRECATED +XMLPUBFUN xmlNsPtr + xmlNewGlobalNs (xmlDocPtr doc, + const xmlChar *href, + const xmlChar *prefix); +#endif /* LIBXML_LEGACY_ENABLED */ +XMLPUBFUN xmlNsPtr + xmlNewNs (xmlNodePtr node, + const xmlChar *href, + const xmlChar *prefix); +XMLPUBFUN void + xmlFreeNs (xmlNsPtr cur); +XMLPUBFUN void + xmlFreeNsList (xmlNsPtr cur); +XMLPUBFUN xmlDocPtr + xmlNewDoc (const xmlChar *version); +XMLPUBFUN void + xmlFreeDoc (xmlDocPtr cur); +XMLPUBFUN xmlAttrPtr + xmlNewDocProp (xmlDocPtr doc, + const xmlChar *name, + const xmlChar *value); +#if defined(LIBXML_TREE_ENABLED) || defined(LIBXML_HTML_ENABLED) || \ + defined(LIBXML_SCHEMAS_ENABLED) +XMLPUBFUN xmlAttrPtr + xmlNewProp (xmlNodePtr node, + const xmlChar *name, + const xmlChar *value); +#endif +XMLPUBFUN xmlAttrPtr + xmlNewNsProp (xmlNodePtr node, + xmlNsPtr ns, + const xmlChar *name, + const xmlChar *value); +XMLPUBFUN xmlAttrPtr + xmlNewNsPropEatName (xmlNodePtr node, + xmlNsPtr ns, + xmlChar *name, + const xmlChar *value); +XMLPUBFUN void + xmlFreePropList (xmlAttrPtr cur); +XMLPUBFUN void + xmlFreeProp (xmlAttrPtr cur); +XMLPUBFUN xmlAttrPtr + xmlCopyProp (xmlNodePtr target, + xmlAttrPtr cur); +XMLPUBFUN xmlAttrPtr + xmlCopyPropList (xmlNodePtr target, + xmlAttrPtr cur); +#ifdef LIBXML_TREE_ENABLED +XMLPUBFUN xmlDtdPtr + xmlCopyDtd (xmlDtdPtr dtd); +#endif /* LIBXML_TREE_ENABLED */ +#if defined(LIBXML_TREE_ENABLED) || defined(LIBXML_SCHEMAS_ENABLED) +XMLPUBFUN xmlDocPtr + xmlCopyDoc (xmlDocPtr doc, + int recursive); +#endif /* defined(LIBXML_TREE_ENABLED) || defined(LIBXML_SCHEMAS_ENABLED) */ +/* + * Creating new nodes. 
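 For illustration, the node constructors declared after this comment are usually combined with the linking helpers from the sections further below; the element names used here ("library", "book", "title") and the text payloads are made up:

     #include <libxml/tree.h>

     int main(void) {
         xmlDocPtr doc = xmlNewDoc(BAD_CAST "1.0");
         xmlNodePtr root = xmlNewDocNode(doc, NULL, BAD_CAST "library", NULL);
         xmlDocSetRootElement(doc, root);

         // xmlNewChild creates the element and links it under its parent;
         // xmlNewTextChild adds a text payload without interpreting entity references
         xmlNodePtr book = xmlNewChild(root, NULL, BAD_CAST "book", NULL);
         xmlNewTextChild(book, NULL, BAD_CAST "title", BAD_CAST "Drag & Drop");
         xmlAddChild(book, xmlNewDocComment(doc, BAD_CAST "generated example"));

         xmlSaveFormatFileEnc("-", doc, "UTF-8", 1);   // "-" writes to stdout
         xmlFreeDoc(doc);
         return 0;
     }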
+ */ +XMLPUBFUN xmlNodePtr + xmlNewDocNode (xmlDocPtr doc, + xmlNsPtr ns, + const xmlChar *name, + const xmlChar *content); +XMLPUBFUN xmlNodePtr + xmlNewDocNodeEatName (xmlDocPtr doc, + xmlNsPtr ns, + xmlChar *name, + const xmlChar *content); +XMLPUBFUN xmlNodePtr + xmlNewNode (xmlNsPtr ns, + const xmlChar *name); +XMLPUBFUN xmlNodePtr + xmlNewNodeEatName (xmlNsPtr ns, + xmlChar *name); +#if defined(LIBXML_TREE_ENABLED) || defined(LIBXML_SCHEMAS_ENABLED) +XMLPUBFUN xmlNodePtr + xmlNewChild (xmlNodePtr parent, + xmlNsPtr ns, + const xmlChar *name, + const xmlChar *content); +#endif +XMLPUBFUN xmlNodePtr + xmlNewDocText (const xmlDoc *doc, + const xmlChar *content); +XMLPUBFUN xmlNodePtr + xmlNewText (const xmlChar *content); +XMLPUBFUN xmlNodePtr + xmlNewDocPI (xmlDocPtr doc, + const xmlChar *name, + const xmlChar *content); +XMLPUBFUN xmlNodePtr + xmlNewPI (const xmlChar *name, + const xmlChar *content); +XMLPUBFUN xmlNodePtr + xmlNewDocTextLen (xmlDocPtr doc, + const xmlChar *content, + int len); +XMLPUBFUN xmlNodePtr + xmlNewTextLen (const xmlChar *content, + int len); +XMLPUBFUN xmlNodePtr + xmlNewDocComment (xmlDocPtr doc, + const xmlChar *content); +XMLPUBFUN xmlNodePtr + xmlNewComment (const xmlChar *content); +XMLPUBFUN xmlNodePtr + xmlNewCDataBlock (xmlDocPtr doc, + const xmlChar *content, + int len); +XMLPUBFUN xmlNodePtr + xmlNewCharRef (xmlDocPtr doc, + const xmlChar *name); +XMLPUBFUN xmlNodePtr + xmlNewReference (const xmlDoc *doc, + const xmlChar *name); +XMLPUBFUN xmlNodePtr + xmlCopyNode (xmlNodePtr node, + int recursive); +XMLPUBFUN xmlNodePtr + xmlDocCopyNode (xmlNodePtr node, + xmlDocPtr doc, + int recursive); +XMLPUBFUN xmlNodePtr + xmlDocCopyNodeList (xmlDocPtr doc, + xmlNodePtr node); +XMLPUBFUN xmlNodePtr + xmlCopyNodeList (xmlNodePtr node); +#ifdef LIBXML_TREE_ENABLED +XMLPUBFUN xmlNodePtr + xmlNewTextChild (xmlNodePtr parent, + xmlNsPtr ns, + const xmlChar *name, + const xmlChar *content); +XMLPUBFUN xmlNodePtr + xmlNewDocRawNode (xmlDocPtr doc, + xmlNsPtr ns, + const xmlChar *name, + const xmlChar *content); +XMLPUBFUN xmlNodePtr + xmlNewDocFragment (xmlDocPtr doc); +#endif /* LIBXML_TREE_ENABLED */ + +/* + * Navigating. + */ +XMLPUBFUN long + xmlGetLineNo (const xmlNode *node); +#if defined(LIBXML_TREE_ENABLED) || defined(LIBXML_DEBUG_ENABLED) +XMLPUBFUN xmlChar * + xmlGetNodePath (const xmlNode *node); +#endif /* defined(LIBXML_TREE_ENABLED) || defined(LIBXML_DEBUG_ENABLED) */ +XMLPUBFUN xmlNodePtr + xmlDocGetRootElement (const xmlDoc *doc); +XMLPUBFUN xmlNodePtr + xmlGetLastChild (const xmlNode *parent); +XMLPUBFUN int + xmlNodeIsText (const xmlNode *node); +XMLPUBFUN int + xmlIsBlankNode (const xmlNode *node); + +/* + * Changing the structure. 
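 A short sketch of detaching and re-linking nodes with the helpers declared after this comment; the element name "draft" and both parameters are hypothetical, and both nodes are assumed to belong to the same document:

     #include <libxml/tree.h>

     // Move every <draft> child element of src under archive.
     static void archive_drafts(xmlNodePtr src, xmlNodePtr archive) {
         xmlNodePtr cur = src->children;
         while (cur != NULL) {
             xmlNodePtr next = cur->next;            // remember before relinking
             if (cur->type == XML_ELEMENT_NODE &&
                 xmlStrcmp(cur->name, BAD_CAST "draft") == 0) {
                 xmlUnlinkNode(cur);                 // detach from src
                 xmlAddChild(archive, cur);          // relink under archive
             }
             cur = next;
         }
     }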
+ */ +#if defined(LIBXML_TREE_ENABLED) || defined(LIBXML_WRITER_ENABLED) +XMLPUBFUN xmlNodePtr + xmlDocSetRootElement (xmlDocPtr doc, + xmlNodePtr root); +#endif /* defined(LIBXML_TREE_ENABLED) || defined(LIBXML_WRITER_ENABLED) */ +#ifdef LIBXML_TREE_ENABLED +XMLPUBFUN void + xmlNodeSetName (xmlNodePtr cur, + const xmlChar *name); +#endif /* LIBXML_TREE_ENABLED */ +XMLPUBFUN xmlNodePtr + xmlAddChild (xmlNodePtr parent, + xmlNodePtr cur); +XMLPUBFUN xmlNodePtr + xmlAddChildList (xmlNodePtr parent, + xmlNodePtr cur); +#if defined(LIBXML_TREE_ENABLED) || defined(LIBXML_WRITER_ENABLED) +XMLPUBFUN xmlNodePtr + xmlReplaceNode (xmlNodePtr old, + xmlNodePtr cur); +#endif /* defined(LIBXML_TREE_ENABLED) || defined(LIBXML_WRITER_ENABLED) */ +#if defined(LIBXML_TREE_ENABLED) || defined(LIBXML_HTML_ENABLED) || \ + defined(LIBXML_SCHEMAS_ENABLED) || defined(LIBXML_XINCLUDE_ENABLED) +XMLPUBFUN xmlNodePtr + xmlAddPrevSibling (xmlNodePtr cur, + xmlNodePtr elem); +#endif /* LIBXML_TREE_ENABLED || LIBXML_HTML_ENABLED || LIBXML_SCHEMAS_ENABLED */ +XMLPUBFUN xmlNodePtr + xmlAddSibling (xmlNodePtr cur, + xmlNodePtr elem); +XMLPUBFUN xmlNodePtr + xmlAddNextSibling (xmlNodePtr cur, + xmlNodePtr elem); +XMLPUBFUN void + xmlUnlinkNode (xmlNodePtr cur); +XMLPUBFUN xmlNodePtr + xmlTextMerge (xmlNodePtr first, + xmlNodePtr second); +XMLPUBFUN int + xmlTextConcat (xmlNodePtr node, + const xmlChar *content, + int len); +XMLPUBFUN void + xmlFreeNodeList (xmlNodePtr cur); +XMLPUBFUN void + xmlFreeNode (xmlNodePtr cur); +XMLPUBFUN int + xmlSetTreeDoc (xmlNodePtr tree, + xmlDocPtr doc); +XMLPUBFUN int + xmlSetListDoc (xmlNodePtr list, + xmlDocPtr doc); +/* + * Namespaces. + */ +XMLPUBFUN xmlNsPtr + xmlSearchNs (xmlDocPtr doc, + xmlNodePtr node, + const xmlChar *nameSpace); +XMLPUBFUN xmlNsPtr + xmlSearchNsByHref (xmlDocPtr doc, + xmlNodePtr node, + const xmlChar *href); +#if defined(LIBXML_TREE_ENABLED) || defined(LIBXML_XPATH_ENABLED) || \ + defined(LIBXML_SCHEMAS_ENABLED) +XMLPUBFUN int + xmlGetNsListSafe (const xmlDoc *doc, + const xmlNode *node, + xmlNsPtr **out); +XMLPUBFUN xmlNsPtr * + xmlGetNsList (const xmlDoc *doc, + const xmlNode *node); +#endif /* defined(LIBXML_TREE_ENABLED) || defined(LIBXML_XPATH_ENABLED) */ + +XMLPUBFUN void + xmlSetNs (xmlNodePtr node, + xmlNsPtr ns); +XMLPUBFUN xmlNsPtr + xmlCopyNamespace (xmlNsPtr cur); +XMLPUBFUN xmlNsPtr + xmlCopyNamespaceList (xmlNsPtr cur); + +/* + * Changing the content. 
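 A minimal sketch of the attribute and content accessors declared after this comment; the node and the "currency" attribute are made-up examples, and the strings returned by xmlGetProp() and xmlNodeGetContent() are allocated copies that the caller frees:

     #include <stdio.h>
     #include <libxml/tree.h>
     #include <libxml/xmlmemory.h>

     static void touch_price(xmlNodePtr price) {
         xmlChar *old = xmlGetProp(price, BAD_CAST "currency");   // allocated copy or NULL
         if (old != NULL) {
             printf("currency was: %s\n", (const char *) old);
             xmlFree(old);
         }
         xmlSetProp(price, BAD_CAST "currency", BAD_CAST "EUR");
         xmlNodeSetContent(price, BAD_CAST "42.00");              // replaces existing children
         xmlChar *text = xmlNodeGetContent(price);                // again an allocated copy
         if (text != NULL) {
             printf("content is now: %s\n", (const char *) text);
             xmlFree(text);
         }
     }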
+ */ +#if defined(LIBXML_TREE_ENABLED) || defined(LIBXML_XINCLUDE_ENABLED) || \ + defined(LIBXML_SCHEMAS_ENABLED) || defined(LIBXML_HTML_ENABLED) +XMLPUBFUN xmlAttrPtr + xmlSetProp (xmlNodePtr node, + const xmlChar *name, + const xmlChar *value); +XMLPUBFUN xmlAttrPtr + xmlSetNsProp (xmlNodePtr node, + xmlNsPtr ns, + const xmlChar *name, + const xmlChar *value); +#endif /* defined(LIBXML_TREE_ENABLED) || defined(LIBXML_XINCLUDE_ENABLED) || \ + defined(LIBXML_SCHEMAS_ENABLED) || defined(LIBXML_HTML_ENABLED) */ +XMLPUBFUN int + xmlNodeGetAttrValue (const xmlNode *node, + const xmlChar *name, + const xmlChar *nsUri, + xmlChar **out); +XMLPUBFUN xmlChar * + xmlGetNoNsProp (const xmlNode *node, + const xmlChar *name); +XMLPUBFUN xmlChar * + xmlGetProp (const xmlNode *node, + const xmlChar *name); +XMLPUBFUN xmlAttrPtr + xmlHasProp (const xmlNode *node, + const xmlChar *name); +XMLPUBFUN xmlAttrPtr + xmlHasNsProp (const xmlNode *node, + const xmlChar *name, + const xmlChar *nameSpace); +XMLPUBFUN xmlChar * + xmlGetNsProp (const xmlNode *node, + const xmlChar *name, + const xmlChar *nameSpace); +XMLPUBFUN xmlNodePtr + xmlStringGetNodeList (const xmlDoc *doc, + const xmlChar *value); +XMLPUBFUN xmlNodePtr + xmlStringLenGetNodeList (const xmlDoc *doc, + const xmlChar *value, + int len); +XMLPUBFUN xmlChar * + xmlNodeListGetString (xmlDocPtr doc, + const xmlNode *list, + int inLine); +#ifdef LIBXML_TREE_ENABLED +XMLPUBFUN xmlChar * + xmlNodeListGetRawString (const xmlDoc *doc, + const xmlNode *list, + int inLine); +#endif /* LIBXML_TREE_ENABLED */ +XMLPUBFUN int + xmlNodeSetContent (xmlNodePtr cur, + const xmlChar *content); +#ifdef LIBXML_TREE_ENABLED +XMLPUBFUN int + xmlNodeSetContentLen (xmlNodePtr cur, + const xmlChar *content, + int len); +#endif /* LIBXML_TREE_ENABLED */ +XMLPUBFUN int + xmlNodeAddContent (xmlNodePtr cur, + const xmlChar *content); +XMLPUBFUN int + xmlNodeAddContentLen (xmlNodePtr cur, + const xmlChar *content, + int len); +XMLPUBFUN xmlChar * + xmlNodeGetContent (const xmlNode *cur); + +XMLPUBFUN int + xmlNodeBufGetContent (xmlBufferPtr buffer, + const xmlNode *cur); +XMLPUBFUN int + xmlBufGetNodeContent (xmlBufPtr buf, + const xmlNode *cur); + +XMLPUBFUN xmlChar * + xmlNodeGetLang (const xmlNode *cur); +XMLPUBFUN int + xmlNodeGetSpacePreserve (const xmlNode *cur); +#ifdef LIBXML_TREE_ENABLED +XMLPUBFUN int + xmlNodeSetLang (xmlNodePtr cur, + const xmlChar *lang); +XMLPUBFUN int + xmlNodeSetSpacePreserve (xmlNodePtr cur, + int val); +#endif /* LIBXML_TREE_ENABLED */ +XMLPUBFUN int + xmlNodeGetBaseSafe (const xmlDoc *doc, + const xmlNode *cur, + xmlChar **baseOut); +XMLPUBFUN xmlChar * + xmlNodeGetBase (const xmlDoc *doc, + const xmlNode *cur); +#if defined(LIBXML_TREE_ENABLED) || defined(LIBXML_XINCLUDE_ENABLED) +XMLPUBFUN int + xmlNodeSetBase (xmlNodePtr cur, + const xmlChar *uri); +#endif + +/* + * Removing content. + */ +XMLPUBFUN int + xmlRemoveProp (xmlAttrPtr cur); +#if defined(LIBXML_TREE_ENABLED) || defined(LIBXML_SCHEMAS_ENABLED) +XMLPUBFUN int + xmlUnsetNsProp (xmlNodePtr node, + xmlNsPtr ns, + const xmlChar *name); +XMLPUBFUN int + xmlUnsetProp (xmlNodePtr node, + const xmlChar *name); +#endif /* defined(LIBXML_TREE_ENABLED) || defined(LIBXML_SCHEMAS_ENABLED) */ + +/* + * Internal, don't use. 
+ */ +XMLPUBFUN void + xmlBufferWriteCHAR (xmlBufferPtr buf, + const xmlChar *string); +XMLPUBFUN void + xmlBufferWriteChar (xmlBufferPtr buf, + const char *string); +XMLPUBFUN void + xmlBufferWriteQuotedString(xmlBufferPtr buf, + const xmlChar *string); + +#ifdef LIBXML_OUTPUT_ENABLED +XMLPUBFUN void xmlAttrSerializeTxtContent(xmlBufferPtr buf, + xmlDocPtr doc, + xmlAttrPtr attr, + const xmlChar *string); +#endif /* LIBXML_OUTPUT_ENABLED */ + +#ifdef LIBXML_TREE_ENABLED +/* + * Namespace handling. + */ +XMLPUBFUN int + xmlReconciliateNs (xmlDocPtr doc, + xmlNodePtr tree); +#endif + +#ifdef LIBXML_OUTPUT_ENABLED +/* + * Saving. + */ +XMLPUBFUN void + xmlDocDumpFormatMemory (xmlDocPtr cur, + xmlChar **mem, + int *size, + int format); +XMLPUBFUN void + xmlDocDumpMemory (xmlDocPtr cur, + xmlChar **mem, + int *size); +XMLPUBFUN void + xmlDocDumpMemoryEnc (xmlDocPtr out_doc, + xmlChar **doc_txt_ptr, + int * doc_txt_len, + const char *txt_encoding); +XMLPUBFUN void + xmlDocDumpFormatMemoryEnc(xmlDocPtr out_doc, + xmlChar **doc_txt_ptr, + int * doc_txt_len, + const char *txt_encoding, + int format); +XMLPUBFUN int + xmlDocFormatDump (FILE *f, + xmlDocPtr cur, + int format); +XMLPUBFUN int + xmlDocDump (FILE *f, + xmlDocPtr cur); +XMLPUBFUN void + xmlElemDump (FILE *f, + xmlDocPtr doc, + xmlNodePtr cur); +XMLPUBFUN int + xmlSaveFile (const char *filename, + xmlDocPtr cur); +XMLPUBFUN int + xmlSaveFormatFile (const char *filename, + xmlDocPtr cur, + int format); +XMLPUBFUN size_t + xmlBufNodeDump (xmlBufPtr buf, + xmlDocPtr doc, + xmlNodePtr cur, + int level, + int format); +XMLPUBFUN int + xmlNodeDump (xmlBufferPtr buf, + xmlDocPtr doc, + xmlNodePtr cur, + int level, + int format); + +XMLPUBFUN int + xmlSaveFileTo (xmlOutputBufferPtr buf, + xmlDocPtr cur, + const char *encoding); +XMLPUBFUN int + xmlSaveFormatFileTo (xmlOutputBufferPtr buf, + xmlDocPtr cur, + const char *encoding, + int format); +XMLPUBFUN void + xmlNodeDumpOutput (xmlOutputBufferPtr buf, + xmlDocPtr doc, + xmlNodePtr cur, + int level, + int format, + const char *encoding); + +XMLPUBFUN int + xmlSaveFormatFileEnc (const char *filename, + xmlDocPtr cur, + const char *encoding, + int format); + +XMLPUBFUN int + xmlSaveFileEnc (const char *filename, + xmlDocPtr cur, + const char *encoding); + +#endif /* LIBXML_OUTPUT_ENABLED */ +/* + * XHTML + */ +XMLPUBFUN int + xmlIsXHTML (const xmlChar *systemID, + const xmlChar *publicID); + +/* + * Compression. + */ +XMLPUBFUN int + xmlGetDocCompressMode (const xmlDoc *doc); +XMLPUBFUN void + xmlSetDocCompressMode (xmlDocPtr doc, + int mode); +XML_DEPRECATED +XMLPUBFUN int + xmlGetCompressMode (void); +XML_DEPRECATED +XMLPUBFUN void + xmlSetCompressMode (int mode); + +/* +* DOM-wrapper helper functions. 
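 A sketch of pulling a node out of one document and into another with the wrapper helpers declared after this comment; all four parameters are hypothetical, and it assumes the caller attaches the adopted node itself afterwards:

     #include <libxml/tree.h>

     static int adopt_into(xmlDocPtr srcDoc, xmlNodePtr node,
                           xmlDocPtr dstDoc, xmlNodePtr dstParent) {
         xmlDOMWrapCtxtPtr ctxt = xmlDOMWrapNewCtxt();
         if (ctxt == NULL)
             return -1;
         // remaps the namespaces of node for dstDoc; 0 selects the default options
         int ret = xmlDOMWrapAdoptNode(ctxt, srcDoc, node, dstDoc, dstParent, 0);
         if (ret == 0 && xmlAddChild(dstParent, node) == NULL)
             ret = -1;
         xmlDOMWrapFreeCtxt(ctxt);
         return ret;
     }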
+*/ +XMLPUBFUN xmlDOMWrapCtxtPtr + xmlDOMWrapNewCtxt (void); +XMLPUBFUN void + xmlDOMWrapFreeCtxt (xmlDOMWrapCtxtPtr ctxt); +XMLPUBFUN int + xmlDOMWrapReconcileNamespaces(xmlDOMWrapCtxtPtr ctxt, + xmlNodePtr elem, + int options); +XMLPUBFUN int + xmlDOMWrapAdoptNode (xmlDOMWrapCtxtPtr ctxt, + xmlDocPtr sourceDoc, + xmlNodePtr node, + xmlDocPtr destDoc, + xmlNodePtr destParent, + int options); +XMLPUBFUN int + xmlDOMWrapRemoveNode (xmlDOMWrapCtxtPtr ctxt, + xmlDocPtr doc, + xmlNodePtr node, + int options); +XMLPUBFUN int + xmlDOMWrapCloneNode (xmlDOMWrapCtxtPtr ctxt, + xmlDocPtr sourceDoc, + xmlNodePtr node, + xmlNodePtr *clonedNode, + xmlDocPtr destDoc, + xmlNodePtr destParent, + int deep, + int options); + +#ifdef LIBXML_TREE_ENABLED +/* + * 5 interfaces from DOM ElementTraversal, but different in entities + * traversal. + */ +XMLPUBFUN unsigned long + xmlChildElementCount (xmlNodePtr parent); +XMLPUBFUN xmlNodePtr + xmlNextElementSibling (xmlNodePtr node); +XMLPUBFUN xmlNodePtr + xmlFirstElementChild (xmlNodePtr parent); +XMLPUBFUN xmlNodePtr + xmlLastElementChild (xmlNodePtr parent); +XMLPUBFUN xmlNodePtr + xmlPreviousElementSibling (xmlNodePtr node); +#endif + +XML_DEPRECATED +XMLPUBFUN xmlRegisterNodeFunc + xmlRegisterNodeDefault (xmlRegisterNodeFunc func); +XML_DEPRECATED +XMLPUBFUN xmlDeregisterNodeFunc + xmlDeregisterNodeDefault (xmlDeregisterNodeFunc func); +XML_DEPRECATED +XMLPUBFUN xmlRegisterNodeFunc + xmlThrDefRegisterNodeDefault(xmlRegisterNodeFunc func); +XML_DEPRECATED +XMLPUBFUN xmlDeregisterNodeFunc + xmlThrDefDeregisterNodeDefault(xmlDeregisterNodeFunc func); + +XML_DEPRECATED XMLPUBFUN xmlBufferAllocationScheme + xmlThrDefBufferAllocScheme (xmlBufferAllocationScheme v); +XML_DEPRECATED XMLPUBFUN int + xmlThrDefDefaultBufferSize (int v); + +#ifdef __cplusplus +} +#endif + +#endif /* __XML_TREE_H__ */ + +#endif /* XML_TREE_INTERNALS */ + diff --git a/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/include/libxml2/libxml/uri.h b/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/include/libxml2/libxml/uri.h new file mode 100644 index 000000000..19980b711 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/include/libxml2/libxml/uri.h @@ -0,0 +1,106 @@ +/** + * Summary: library of generic URI related routines + * Description: library of generic URI related routines + * Implements RFC 2396 + * + * Copy: See Copyright for the status of this software. + * + * Author: Daniel Veillard + */ + +#ifndef __XML_URI_H__ +#define __XML_URI_H__ + +#include +#include +#include + +#ifdef __cplusplus +extern "C" { +#endif + +/** + * xmlURI: + * + * A parsed URI reference. This is a struct containing the various fields + * as described in RFC 2396 but separated for further processing. + * + * Note: query is a deprecated field which is incorrectly unescaped. + * query_raw takes precedence over query if the former is set. 
+ * See: http://mail.gnome.org/archives/xml/2007-April/thread.html#00127 + */ +typedef struct _xmlURI xmlURI; +typedef xmlURI *xmlURIPtr; +struct _xmlURI { + char *scheme; /* the URI scheme */ + char *opaque; /* opaque part */ + char *authority; /* the authority part */ + char *server; /* the server part */ + char *user; /* the user part */ + int port; /* the port number */ + char *path; /* the path string */ + char *query; /* the query string (deprecated - use with caution) */ + char *fragment; /* the fragment identifier */ + int cleanup; /* parsing potentially unclean URI */ + char *query_raw; /* the query string (as it appears in the URI) */ +}; + +/* + * This function is in tree.h: + * xmlChar * xmlNodeGetBase (xmlDocPtr doc, + * xmlNodePtr cur); + */ +XMLPUBFUN xmlURIPtr + xmlCreateURI (void); +XMLPUBFUN int + xmlBuildURISafe (const xmlChar *URI, + const xmlChar *base, + xmlChar **out); +XMLPUBFUN xmlChar * + xmlBuildURI (const xmlChar *URI, + const xmlChar *base); +XMLPUBFUN int + xmlBuildRelativeURISafe (const xmlChar *URI, + const xmlChar *base, + xmlChar **out); +XMLPUBFUN xmlChar * + xmlBuildRelativeURI (const xmlChar *URI, + const xmlChar *base); +XMLPUBFUN xmlURIPtr + xmlParseURI (const char *str); +XMLPUBFUN int + xmlParseURISafe (const char *str, + xmlURIPtr *uri); +XMLPUBFUN xmlURIPtr + xmlParseURIRaw (const char *str, + int raw); +XMLPUBFUN int + xmlParseURIReference (xmlURIPtr uri, + const char *str); +XMLPUBFUN xmlChar * + xmlSaveUri (xmlURIPtr uri); +XMLPUBFUN void + xmlPrintURI (FILE *stream, + xmlURIPtr uri); +XMLPUBFUN xmlChar * + xmlURIEscapeStr (const xmlChar *str, + const xmlChar *list); +XMLPUBFUN char * + xmlURIUnescapeString (const char *str, + int len, + char *target); +XMLPUBFUN int + xmlNormalizeURIPath (char *path); +XMLPUBFUN xmlChar * + xmlURIEscape (const xmlChar *str); +XMLPUBFUN void + xmlFreeURI (xmlURIPtr uri); +XMLPUBFUN xmlChar* + xmlCanonicPath (const xmlChar *path); +XMLPUBFUN xmlChar* + xmlPathToURI (const xmlChar *path); + +#ifdef __cplusplus +} +#endif +#endif /* __XML_URI_H__ */ diff --git a/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/include/libxml2/libxml/valid.h b/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/include/libxml2/libxml/valid.h new file mode 100644 index 000000000..e1698d7a3 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/include/libxml2/libxml/valid.h @@ -0,0 +1,477 @@ +/* + * Summary: The DTD validation + * Description: API for the DTD handling and the validity checking + * + * Copy: See Copyright for the status of this software. + * + * Author: Daniel Veillard + */ + + +#ifndef __XML_VALID_H__ +#define __XML_VALID_H__ + +/** DOC_DISABLE */ +#include +#include +#define XML_TREE_INTERNALS +#include +#undef XML_TREE_INTERNALS +#include +#include +#include +/** DOC_ENABLE */ + +#ifdef __cplusplus +extern "C" { +#endif + +/* + * Validation state added for non-determinist content model. + */ +typedef struct _xmlValidState xmlValidState; +typedef xmlValidState *xmlValidStatePtr; + +/** + * xmlValidityErrorFunc: + * @ctx: usually an xmlValidCtxtPtr to a validity error context, + * but comes from ctxt->userData (which normally contains such + * a pointer); ctxt->userData can be changed by the user. + * @msg: the string to format *printf like vararg + * @...: remaining arguments to the format + * + * Callback called when a validity error is found. This is a message + * oriented function similar to an *printf function. 
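 For illustration, a conforming callback can simply forward the variable arguments to vfprintf(); the label carried through ctx here is an invented example of what userData might hold:

     #include <stdarg.h>
     #include <stdio.h>

     static void my_validity_error(void *ctx, const char *msg, ...) {
         va_list ap;
         fprintf(stderr, "validity error (%s): ", ctx ? (const char *) ctx : "?");
         va_start(ap, msg);
         vfprintf(stderr, msg, ap);       // format the printf-style payload
         va_end(ap);
     }

 Installing it amounts to filling an xmlValidCtxt (defined a little further below): set userData, error and warning before handing the context to a validation routine.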
+ */ +typedef void (*xmlValidityErrorFunc) (void *ctx, + const char *msg, + ...) LIBXML_ATTR_FORMAT(2,3); + +/** + * xmlValidityWarningFunc: + * @ctx: usually an xmlValidCtxtPtr to a validity error context, + * but comes from ctxt->userData (which normally contains such + * a pointer); ctxt->userData can be changed by the user. + * @msg: the string to format *printf like vararg + * @...: remaining arguments to the format + * + * Callback called when a validity warning is found. This is a message + * oriented function similar to an *printf function. + */ +typedef void (*xmlValidityWarningFunc) (void *ctx, + const char *msg, + ...) LIBXML_ATTR_FORMAT(2,3); + +/* + * xmlValidCtxt: + * An xmlValidCtxt is used for error reporting when validating. + */ +typedef struct _xmlValidCtxt xmlValidCtxt; +typedef xmlValidCtxt *xmlValidCtxtPtr; +struct _xmlValidCtxt { + void *userData; /* user specific data block */ + xmlValidityErrorFunc error; /* the callback in case of errors */ + xmlValidityWarningFunc warning; /* the callback in case of warning */ + + /* Node analysis stack used when validating within entities */ + xmlNodePtr node; /* Current parsed Node */ + int nodeNr; /* Depth of the parsing stack */ + int nodeMax; /* Max depth of the parsing stack */ + xmlNodePtr *nodeTab; /* array of nodes */ + + unsigned int flags; /* internal flags */ + xmlDocPtr doc; /* the document */ + int valid; /* temporary validity check result */ + + /* state state used for non-determinist content validation */ + xmlValidState *vstate; /* current state */ + int vstateNr; /* Depth of the validation stack */ + int vstateMax; /* Max depth of the validation stack */ + xmlValidState *vstateTab; /* array of validation states */ + +#ifdef LIBXML_REGEXP_ENABLED + xmlAutomataPtr am; /* the automata */ + xmlAutomataStatePtr state; /* used to build the automata */ +#else + void *am; + void *state; +#endif +}; + +/* + * ALL notation declarations are stored in a table. + * There is one table per DTD. + */ + +typedef struct _xmlHashTable xmlNotationTable; +typedef xmlNotationTable *xmlNotationTablePtr; + +/* + * ALL element declarations are stored in a table. + * There is one table per DTD. + */ + +typedef struct _xmlHashTable xmlElementTable; +typedef xmlElementTable *xmlElementTablePtr; + +/* + * ALL attribute declarations are stored in a table. + * There is one table per DTD. + */ + +typedef struct _xmlHashTable xmlAttributeTable; +typedef xmlAttributeTable *xmlAttributeTablePtr; + +/* + * ALL IDs attributes are stored in a table. + * There is one table per document. + */ + +typedef struct _xmlHashTable xmlIDTable; +typedef xmlIDTable *xmlIDTablePtr; + +/* + * ALL Refs attributes are stored in a table. + * There is one table per document. 
+ */ + +typedef struct _xmlHashTable xmlRefTable; +typedef xmlRefTable *xmlRefTablePtr; + +/* Notation */ +XMLPUBFUN xmlNotationPtr + xmlAddNotationDecl (xmlValidCtxtPtr ctxt, + xmlDtdPtr dtd, + const xmlChar *name, + const xmlChar *PublicID, + const xmlChar *SystemID); +#ifdef LIBXML_TREE_ENABLED +XMLPUBFUN xmlNotationTablePtr + xmlCopyNotationTable (xmlNotationTablePtr table); +#endif /* LIBXML_TREE_ENABLED */ +XMLPUBFUN void + xmlFreeNotationTable (xmlNotationTablePtr table); +#ifdef LIBXML_OUTPUT_ENABLED +XML_DEPRECATED +XMLPUBFUN void + xmlDumpNotationDecl (xmlBufferPtr buf, + xmlNotationPtr nota); +/* XML_DEPRECATED, still used in lxml */ +XMLPUBFUN void + xmlDumpNotationTable (xmlBufferPtr buf, + xmlNotationTablePtr table); +#endif /* LIBXML_OUTPUT_ENABLED */ + +/* Element Content */ +/* the non Doc version are being deprecated */ +XMLPUBFUN xmlElementContentPtr + xmlNewElementContent (const xmlChar *name, + xmlElementContentType type); +XMLPUBFUN xmlElementContentPtr + xmlCopyElementContent (xmlElementContentPtr content); +XMLPUBFUN void + xmlFreeElementContent (xmlElementContentPtr cur); +/* the new versions with doc argument */ +XMLPUBFUN xmlElementContentPtr + xmlNewDocElementContent (xmlDocPtr doc, + const xmlChar *name, + xmlElementContentType type); +XMLPUBFUN xmlElementContentPtr + xmlCopyDocElementContent(xmlDocPtr doc, + xmlElementContentPtr content); +XMLPUBFUN void + xmlFreeDocElementContent(xmlDocPtr doc, + xmlElementContentPtr cur); +XMLPUBFUN void + xmlSnprintfElementContent(char *buf, + int size, + xmlElementContentPtr content, + int englob); +#ifdef LIBXML_OUTPUT_ENABLED +XML_DEPRECATED +XMLPUBFUN void + xmlSprintfElementContent(char *buf, + xmlElementContentPtr content, + int englob); +#endif /* LIBXML_OUTPUT_ENABLED */ + +/* Element */ +XMLPUBFUN xmlElementPtr + xmlAddElementDecl (xmlValidCtxtPtr ctxt, + xmlDtdPtr dtd, + const xmlChar *name, + xmlElementTypeVal type, + xmlElementContentPtr content); +#ifdef LIBXML_TREE_ENABLED +XMLPUBFUN xmlElementTablePtr + xmlCopyElementTable (xmlElementTablePtr table); +#endif /* LIBXML_TREE_ENABLED */ +XMLPUBFUN void + xmlFreeElementTable (xmlElementTablePtr table); +#ifdef LIBXML_OUTPUT_ENABLED +XML_DEPRECATED +XMLPUBFUN void + xmlDumpElementTable (xmlBufferPtr buf, + xmlElementTablePtr table); +XML_DEPRECATED +XMLPUBFUN void + xmlDumpElementDecl (xmlBufferPtr buf, + xmlElementPtr elem); +#endif /* LIBXML_OUTPUT_ENABLED */ + +/* Enumeration */ +XMLPUBFUN xmlEnumerationPtr + xmlCreateEnumeration (const xmlChar *name); +XMLPUBFUN void + xmlFreeEnumeration (xmlEnumerationPtr cur); +#ifdef LIBXML_TREE_ENABLED +XMLPUBFUN xmlEnumerationPtr + xmlCopyEnumeration (xmlEnumerationPtr cur); +#endif /* LIBXML_TREE_ENABLED */ + +/* Attribute */ +XMLPUBFUN xmlAttributePtr + xmlAddAttributeDecl (xmlValidCtxtPtr ctxt, + xmlDtdPtr dtd, + const xmlChar *elem, + const xmlChar *name, + const xmlChar *ns, + xmlAttributeType type, + xmlAttributeDefault def, + const xmlChar *defaultValue, + xmlEnumerationPtr tree); +#ifdef LIBXML_TREE_ENABLED +XMLPUBFUN xmlAttributeTablePtr + xmlCopyAttributeTable (xmlAttributeTablePtr table); +#endif /* LIBXML_TREE_ENABLED */ +XMLPUBFUN void + xmlFreeAttributeTable (xmlAttributeTablePtr table); +#ifdef LIBXML_OUTPUT_ENABLED +XML_DEPRECATED +XMLPUBFUN void + xmlDumpAttributeTable (xmlBufferPtr buf, + xmlAttributeTablePtr table); +XML_DEPRECATED +XMLPUBFUN void + xmlDumpAttributeDecl (xmlBufferPtr buf, + xmlAttributePtr attr); +#endif /* LIBXML_OUTPUT_ENABLED */ + +/* IDs */ +XMLPUBFUN int + xmlAddIDSafe (xmlAttrPtr 
attr, + const xmlChar *value); +XMLPUBFUN xmlIDPtr + xmlAddID (xmlValidCtxtPtr ctxt, + xmlDocPtr doc, + const xmlChar *value, + xmlAttrPtr attr); +XMLPUBFUN void + xmlFreeIDTable (xmlIDTablePtr table); +XMLPUBFUN xmlAttrPtr + xmlGetID (xmlDocPtr doc, + const xmlChar *ID); +XMLPUBFUN int + xmlIsID (xmlDocPtr doc, + xmlNodePtr elem, + xmlAttrPtr attr); +XMLPUBFUN int + xmlRemoveID (xmlDocPtr doc, + xmlAttrPtr attr); + +/* IDREFs */ +XML_DEPRECATED +XMLPUBFUN xmlRefPtr + xmlAddRef (xmlValidCtxtPtr ctxt, + xmlDocPtr doc, + const xmlChar *value, + xmlAttrPtr attr); +XML_DEPRECATED +XMLPUBFUN void + xmlFreeRefTable (xmlRefTablePtr table); +XML_DEPRECATED +XMLPUBFUN int + xmlIsRef (xmlDocPtr doc, + xmlNodePtr elem, + xmlAttrPtr attr); +XML_DEPRECATED +XMLPUBFUN int + xmlRemoveRef (xmlDocPtr doc, + xmlAttrPtr attr); +XML_DEPRECATED +XMLPUBFUN xmlListPtr + xmlGetRefs (xmlDocPtr doc, + const xmlChar *ID); + +/** + * The public function calls related to validity checking. + */ +#ifdef LIBXML_VALID_ENABLED +/* Allocate/Release Validation Contexts */ +XMLPUBFUN xmlValidCtxtPtr + xmlNewValidCtxt(void); +XMLPUBFUN void + xmlFreeValidCtxt(xmlValidCtxtPtr); + +XML_DEPRECATED +XMLPUBFUN int + xmlValidateRoot (xmlValidCtxtPtr ctxt, + xmlDocPtr doc); +XML_DEPRECATED +XMLPUBFUN int + xmlValidateElementDecl (xmlValidCtxtPtr ctxt, + xmlDocPtr doc, + xmlElementPtr elem); +XML_DEPRECATED +XMLPUBFUN xmlChar * + xmlValidNormalizeAttributeValue(xmlDocPtr doc, + xmlNodePtr elem, + const xmlChar *name, + const xmlChar *value); +XML_DEPRECATED +XMLPUBFUN xmlChar * + xmlValidCtxtNormalizeAttributeValue(xmlValidCtxtPtr ctxt, + xmlDocPtr doc, + xmlNodePtr elem, + const xmlChar *name, + const xmlChar *value); +XML_DEPRECATED +XMLPUBFUN int + xmlValidateAttributeDecl(xmlValidCtxtPtr ctxt, + xmlDocPtr doc, + xmlAttributePtr attr); +XML_DEPRECATED +XMLPUBFUN int + xmlValidateAttributeValue(xmlAttributeType type, + const xmlChar *value); +XML_DEPRECATED +XMLPUBFUN int + xmlValidateNotationDecl (xmlValidCtxtPtr ctxt, + xmlDocPtr doc, + xmlNotationPtr nota); +XMLPUBFUN int + xmlValidateDtd (xmlValidCtxtPtr ctxt, + xmlDocPtr doc, + xmlDtdPtr dtd); +XML_DEPRECATED +XMLPUBFUN int + xmlValidateDtdFinal (xmlValidCtxtPtr ctxt, + xmlDocPtr doc); +XMLPUBFUN int + xmlValidateDocument (xmlValidCtxtPtr ctxt, + xmlDocPtr doc); +XMLPUBFUN int + xmlValidateElement (xmlValidCtxtPtr ctxt, + xmlDocPtr doc, + xmlNodePtr elem); +XML_DEPRECATED +XMLPUBFUN int + xmlValidateOneElement (xmlValidCtxtPtr ctxt, + xmlDocPtr doc, + xmlNodePtr elem); +XML_DEPRECATED +XMLPUBFUN int + xmlValidateOneAttribute (xmlValidCtxtPtr ctxt, + xmlDocPtr doc, + xmlNodePtr elem, + xmlAttrPtr attr, + const xmlChar *value); +XML_DEPRECATED +XMLPUBFUN int + xmlValidateOneNamespace (xmlValidCtxtPtr ctxt, + xmlDocPtr doc, + xmlNodePtr elem, + const xmlChar *prefix, + xmlNsPtr ns, + const xmlChar *value); +XML_DEPRECATED +XMLPUBFUN int + xmlValidateDocumentFinal(xmlValidCtxtPtr ctxt, + xmlDocPtr doc); +#endif /* LIBXML_VALID_ENABLED */ + +#if defined(LIBXML_VALID_ENABLED) || defined(LIBXML_SCHEMAS_ENABLED) +XML_DEPRECATED +XMLPUBFUN int + xmlValidateNotationUse (xmlValidCtxtPtr ctxt, + xmlDocPtr doc, + const xmlChar *notationName); +#endif /* LIBXML_VALID_ENABLED or LIBXML_SCHEMAS_ENABLED */ + +XMLPUBFUN int + xmlIsMixedElement (xmlDocPtr doc, + const xmlChar *name); +XMLPUBFUN xmlAttributePtr + xmlGetDtdAttrDesc (xmlDtdPtr dtd, + const xmlChar *elem, + const xmlChar *name); +XMLPUBFUN xmlAttributePtr + xmlGetDtdQAttrDesc (xmlDtdPtr dtd, + const xmlChar *elem, + const xmlChar 
*name, + const xmlChar *prefix); +XMLPUBFUN xmlNotationPtr + xmlGetDtdNotationDesc (xmlDtdPtr dtd, + const xmlChar *name); +XMLPUBFUN xmlElementPtr + xmlGetDtdQElementDesc (xmlDtdPtr dtd, + const xmlChar *name, + const xmlChar *prefix); +XMLPUBFUN xmlElementPtr + xmlGetDtdElementDesc (xmlDtdPtr dtd, + const xmlChar *name); + +#ifdef LIBXML_VALID_ENABLED + +XMLPUBFUN int + xmlValidGetPotentialChildren(xmlElementContent *ctree, + const xmlChar **names, + int *len, + int max); + +XMLPUBFUN int + xmlValidGetValidElements(xmlNode *prev, + xmlNode *next, + const xmlChar **names, + int max); +XMLPUBFUN int + xmlValidateNameValue (const xmlChar *value); +XMLPUBFUN int + xmlValidateNamesValue (const xmlChar *value); +XMLPUBFUN int + xmlValidateNmtokenValue (const xmlChar *value); +XMLPUBFUN int + xmlValidateNmtokensValue(const xmlChar *value); + +#ifdef LIBXML_REGEXP_ENABLED +/* + * Validation based on the regexp support + */ +XML_DEPRECATED +XMLPUBFUN int + xmlValidBuildContentModel(xmlValidCtxtPtr ctxt, + xmlElementPtr elem); + +XML_DEPRECATED +XMLPUBFUN int + xmlValidatePushElement (xmlValidCtxtPtr ctxt, + xmlDocPtr doc, + xmlNodePtr elem, + const xmlChar *qname); +XML_DEPRECATED +XMLPUBFUN int + xmlValidatePushCData (xmlValidCtxtPtr ctxt, + const xmlChar *data, + int len); +XML_DEPRECATED +XMLPUBFUN int + xmlValidatePopElement (xmlValidCtxtPtr ctxt, + xmlDocPtr doc, + xmlNodePtr elem, + const xmlChar *qname); +#endif /* LIBXML_REGEXP_ENABLED */ +#endif /* LIBXML_VALID_ENABLED */ +#ifdef __cplusplus +} +#endif +#endif /* __XML_VALID_H__ */ diff --git a/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/include/libxml2/libxml/xinclude.h b/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/include/libxml2/libxml/xinclude.h new file mode 100644 index 000000000..71fa4c20d --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/include/libxml2/libxml/xinclude.h @@ -0,0 +1,136 @@ +/* + * Summary: implementation of XInclude + * Description: API to handle XInclude processing, + * implements the + * World Wide Web Consortium Last Call Working Draft 10 November 2003 + * http://www.w3.org/TR/2003/WD-xinclude-20031110 + * + * Copy: See Copyright for the status of this software. 
+ * + * Author: Daniel Veillard + */ + +#ifndef __XML_XINCLUDE_H__ +#define __XML_XINCLUDE_H__ + +#include +#include +#include + +#ifdef LIBXML_XINCLUDE_ENABLED + +#ifdef __cplusplus +extern "C" { +#endif + +/** + * XINCLUDE_NS: + * + * Macro defining the Xinclude namespace: http://www.w3.org/2003/XInclude + */ +#define XINCLUDE_NS (const xmlChar *) "http://www.w3.org/2003/XInclude" +/** + * XINCLUDE_OLD_NS: + * + * Macro defining the draft Xinclude namespace: http://www.w3.org/2001/XInclude + */ +#define XINCLUDE_OLD_NS (const xmlChar *) "http://www.w3.org/2001/XInclude" +/** + * XINCLUDE_NODE: + * + * Macro defining "include" + */ +#define XINCLUDE_NODE (const xmlChar *) "include" +/** + * XINCLUDE_FALLBACK: + * + * Macro defining "fallback" + */ +#define XINCLUDE_FALLBACK (const xmlChar *) "fallback" +/** + * XINCLUDE_HREF: + * + * Macro defining "href" + */ +#define XINCLUDE_HREF (const xmlChar *) "href" +/** + * XINCLUDE_PARSE: + * + * Macro defining "parse" + */ +#define XINCLUDE_PARSE (const xmlChar *) "parse" +/** + * XINCLUDE_PARSE_XML: + * + * Macro defining "xml" + */ +#define XINCLUDE_PARSE_XML (const xmlChar *) "xml" +/** + * XINCLUDE_PARSE_TEXT: + * + * Macro defining "text" + */ +#define XINCLUDE_PARSE_TEXT (const xmlChar *) "text" +/** + * XINCLUDE_PARSE_ENCODING: + * + * Macro defining "encoding" + */ +#define XINCLUDE_PARSE_ENCODING (const xmlChar *) "encoding" +/** + * XINCLUDE_PARSE_XPOINTER: + * + * Macro defining "xpointer" + */ +#define XINCLUDE_PARSE_XPOINTER (const xmlChar *) "xpointer" + +typedef struct _xmlXIncludeCtxt xmlXIncludeCtxt; +typedef xmlXIncludeCtxt *xmlXIncludeCtxtPtr; + +/* + * standalone processing + */ +XMLPUBFUN int + xmlXIncludeProcess (xmlDocPtr doc); +XMLPUBFUN int + xmlXIncludeProcessFlags (xmlDocPtr doc, + int flags); +XMLPUBFUN int + xmlXIncludeProcessFlagsData(xmlDocPtr doc, + int flags, + void *data); +XMLPUBFUN int + xmlXIncludeProcessTreeFlagsData(xmlNodePtr tree, + int flags, + void *data); +XMLPUBFUN int + xmlXIncludeProcessTree (xmlNodePtr tree); +XMLPUBFUN int + xmlXIncludeProcessTreeFlags(xmlNodePtr tree, + int flags); +/* + * contextual processing + */ +XMLPUBFUN xmlXIncludeCtxtPtr + xmlXIncludeNewContext (xmlDocPtr doc); +XMLPUBFUN int + xmlXIncludeSetFlags (xmlXIncludeCtxtPtr ctxt, + int flags); +XMLPUBFUN void + xmlXIncludeSetErrorHandler(xmlXIncludeCtxtPtr ctxt, + xmlStructuredErrorFunc handler, + void *data); +XMLPUBFUN int + xmlXIncludeGetLastError (xmlXIncludeCtxtPtr ctxt); +XMLPUBFUN void + xmlXIncludeFreeContext (xmlXIncludeCtxtPtr ctxt); +XMLPUBFUN int + xmlXIncludeProcessNode (xmlXIncludeCtxtPtr ctxt, + xmlNodePtr tree); +#ifdef __cplusplus +} +#endif + +#endif /* LIBXML_XINCLUDE_ENABLED */ + +#endif /* __XML_XINCLUDE_H__ */ diff --git a/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/include/libxml2/libxml/xlink.h b/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/include/libxml2/libxml/xlink.h new file mode 100644 index 000000000..106573666 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/include/libxml2/libxml/xlink.h @@ -0,0 +1,189 @@ +/* + * Summary: unfinished XLink detection module + * Description: unfinished XLink detection module + * + * Copy: See Copyright for the status of this software. 
+ * + * Author: Daniel Veillard + */ + +#ifndef __XML_XLINK_H__ +#define __XML_XLINK_H__ + +#include +#include + +#ifdef LIBXML_XPTR_ENABLED + +#ifdef __cplusplus +extern "C" { +#endif + +/** + * Various defines for the various Link properties. + * + * NOTE: the link detection layer will try to resolve QName expansion + * of namespaces. If "foo" is the prefix for "http://foo.com/" + * then the link detection layer will expand role="foo:myrole" + * to "http://foo.com/:myrole". + * NOTE: the link detection layer will expand URI-References found on + * href attributes by using the base mechanism if found. + */ +typedef xmlChar *xlinkHRef; +typedef xmlChar *xlinkRole; +typedef xmlChar *xlinkTitle; + +typedef enum { + XLINK_TYPE_NONE = 0, + XLINK_TYPE_SIMPLE, + XLINK_TYPE_EXTENDED, + XLINK_TYPE_EXTENDED_SET +} xlinkType; + +typedef enum { + XLINK_SHOW_NONE = 0, + XLINK_SHOW_NEW, + XLINK_SHOW_EMBED, + XLINK_SHOW_REPLACE +} xlinkShow; + +typedef enum { + XLINK_ACTUATE_NONE = 0, + XLINK_ACTUATE_AUTO, + XLINK_ACTUATE_ONREQUEST +} xlinkActuate; + +/** + * xlinkNodeDetectFunc: + * @ctx: user data pointer + * @node: the node to check + * + * This is the prototype for the link detection routine. + * It calls the default link detection callbacks upon link detection. + */ +typedef void (*xlinkNodeDetectFunc) (void *ctx, xmlNodePtr node); + +/* + * The link detection module interact with the upper layers using + * a set of callback registered at parsing time. + */ + +/** + * xlinkSimpleLinkFunk: + * @ctx: user data pointer + * @node: the node carrying the link + * @href: the target of the link + * @role: the role string + * @title: the link title + * + * This is the prototype for a simple link detection callback. + */ +typedef void +(*xlinkSimpleLinkFunk) (void *ctx, + xmlNodePtr node, + const xlinkHRef href, + const xlinkRole role, + const xlinkTitle title); + +/** + * xlinkExtendedLinkFunk: + * @ctx: user data pointer + * @node: the node carrying the link + * @nbLocators: the number of locators detected on the link + * @hrefs: pointer to the array of locator hrefs + * @roles: pointer to the array of locator roles + * @nbArcs: the number of arcs detected on the link + * @from: pointer to the array of source roles found on the arcs + * @to: pointer to the array of target roles found on the arcs + * @show: array of values for the show attributes found on the arcs + * @actuate: array of values for the actuate attributes found on the arcs + * @nbTitles: the number of titles detected on the link + * @title: array of titles detected on the link + * @langs: array of xml:lang values for the titles + * + * This is the prototype for a extended link detection callback. + */ +typedef void +(*xlinkExtendedLinkFunk)(void *ctx, + xmlNodePtr node, + int nbLocators, + const xlinkHRef *hrefs, + const xlinkRole *roles, + int nbArcs, + const xlinkRole *from, + const xlinkRole *to, + xlinkShow *show, + xlinkActuate *actuate, + int nbTitles, + const xlinkTitle *titles, + const xmlChar **langs); + +/** + * xlinkExtendedLinkSetFunk: + * @ctx: user data pointer + * @node: the node carrying the link + * @nbLocators: the number of locators detected on the link + * @hrefs: pointer to the array of locator hrefs + * @roles: pointer to the array of locator roles + * @nbTitles: the number of titles detected on the link + * @title: array of titles detected on the link + * @langs: array of xml:lang values for the titles + * + * This is the prototype for a extended link set detection callback. 
+ */ +typedef void +(*xlinkExtendedLinkSetFunk) (void *ctx, + xmlNodePtr node, + int nbLocators, + const xlinkHRef *hrefs, + const xlinkRole *roles, + int nbTitles, + const xlinkTitle *titles, + const xmlChar **langs); + +/** + * This is the structure containing a set of Links detection callbacks. + * + * There is no default xlink callbacks, if one want to get link + * recognition activated, those call backs must be provided before parsing. + */ +typedef struct _xlinkHandler xlinkHandler; +typedef xlinkHandler *xlinkHandlerPtr; +struct _xlinkHandler { + xlinkSimpleLinkFunk simple; + xlinkExtendedLinkFunk extended; + xlinkExtendedLinkSetFunk set; +}; + +/* + * The default detection routine, can be overridden, they call the default + * detection callbacks. + */ + +XMLPUBFUN xlinkNodeDetectFunc + xlinkGetDefaultDetect (void); +XMLPUBFUN void + xlinkSetDefaultDetect (xlinkNodeDetectFunc func); + +/* + * Routines to set/get the default handlers. + */ +XMLPUBFUN xlinkHandlerPtr + xlinkGetDefaultHandler (void); +XMLPUBFUN void + xlinkSetDefaultHandler (xlinkHandlerPtr handler); + +/* + * Link detection module itself. + */ +XMLPUBFUN xlinkType + xlinkIsLink (xmlDocPtr doc, + xmlNodePtr node); + +#ifdef __cplusplus +} +#endif + +#endif /* LIBXML_XPTR_ENABLED */ + +#endif /* __XML_XLINK_H__ */ diff --git a/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/include/libxml2/libxml/xmlIO.h b/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/include/libxml2/libxml/xmlIO.h new file mode 100644 index 000000000..5a35dc64b --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/include/libxml2/libxml/xmlIO.h @@ -0,0 +1,438 @@ +/* + * Summary: interface for the I/O interfaces used by the parser + * Description: interface for the I/O interfaces used by the parser + * + * Copy: See Copyright for the status of this software. + * + * Author: Daniel Veillard + */ + +#ifndef __XML_IO_H__ +#define __XML_IO_H__ + +/** DOC_DISABLE */ +#include +#include +#include +#define XML_TREE_INTERNALS +#include +#undef XML_TREE_INTERNALS +/** DOC_ENABLE */ + +#ifdef __cplusplus +extern "C" { +#endif + +/* + * Those are the functions and datatypes for the parser input + * I/O structures. + */ + +/** + * xmlInputMatchCallback: + * @filename: the filename or URI + * + * Callback used in the I/O Input API to detect if the current handler + * can provide input functionality for this resource. 
+ * + * Returns 1 if yes and 0 if another Input module should be used + */ +typedef int (*xmlInputMatchCallback) (char const *filename); +/** + * xmlInputOpenCallback: + * @filename: the filename or URI + * + * Callback used in the I/O Input API to open the resource + * + * Returns an Input context or NULL in case or error + */ +typedef void * (*xmlInputOpenCallback) (char const *filename); +/** + * xmlInputReadCallback: + * @context: an Input context + * @buffer: the buffer to store data read + * @len: the length of the buffer in bytes + * + * Callback used in the I/O Input API to read the resource + * + * Returns the number of bytes read or -1 in case of error + */ +typedef int (*xmlInputReadCallback) (void * context, char * buffer, int len); +/** + * xmlInputCloseCallback: + * @context: an Input context + * + * Callback used in the I/O Input API to close the resource + * + * Returns 0 or -1 in case of error + */ +typedef int (*xmlInputCloseCallback) (void * context); + +#ifdef LIBXML_OUTPUT_ENABLED +/* + * Those are the functions and datatypes for the library output + * I/O structures. + */ + +/** + * xmlOutputMatchCallback: + * @filename: the filename or URI + * + * Callback used in the I/O Output API to detect if the current handler + * can provide output functionality for this resource. + * + * Returns 1 if yes and 0 if another Output module should be used + */ +typedef int (*xmlOutputMatchCallback) (char const *filename); +/** + * xmlOutputOpenCallback: + * @filename: the filename or URI + * + * Callback used in the I/O Output API to open the resource + * + * Returns an Output context or NULL in case or error + */ +typedef void * (*xmlOutputOpenCallback) (char const *filename); +/** + * xmlOutputWriteCallback: + * @context: an Output context + * @buffer: the buffer of data to write + * @len: the length of the buffer in bytes + * + * Callback used in the I/O Output API to write to the resource + * + * Returns the number of bytes written or -1 in case of error + */ +typedef int (*xmlOutputWriteCallback) (void * context, const char * buffer, + int len); +/** + * xmlOutputCloseCallback: + * @context: an Output context + * + * Callback used in the I/O Output API to close the resource + * + * Returns 0 or -1 in case of error + */ +typedef int (*xmlOutputCloseCallback) (void * context); +#endif /* LIBXML_OUTPUT_ENABLED */ + +/** + * xmlParserInputBufferCreateFilenameFunc: + * @URI: the URI to read from + * @enc: the requested source encoding + * + * Signature for the function doing the lookup for a suitable input method + * corresponding to an URI. + * + * Returns the new xmlParserInputBufferPtr in case of success or NULL if no + * method was found. + */ +typedef xmlParserInputBufferPtr +(*xmlParserInputBufferCreateFilenameFunc)(const char *URI, xmlCharEncoding enc); + +/** + * xmlOutputBufferCreateFilenameFunc: + * @URI: the URI to write to + * @enc: the requested target encoding + * + * Signature for the function doing the lookup for a suitable output method + * corresponding to an URI. + * + * Returns the new xmlOutputBufferPtr in case of success or NULL if no + * method was found. 
+ */ +typedef xmlOutputBufferPtr +(*xmlOutputBufferCreateFilenameFunc)(const char *URI, + xmlCharEncodingHandlerPtr encoder, int compression); + +struct _xmlParserInputBuffer { + void* context; + xmlInputReadCallback readcallback; + xmlInputCloseCallback closecallback; + + xmlCharEncodingHandlerPtr encoder; /* I18N conversions to UTF-8 */ + + xmlBufPtr buffer; /* Local buffer encoded in UTF-8 */ + xmlBufPtr raw; /* if encoder != NULL buffer for raw input */ + int compressed; /* -1=unknown, 0=not compressed, 1=compressed */ + int error; + unsigned long rawconsumed;/* amount consumed from raw */ +}; + + +#ifdef LIBXML_OUTPUT_ENABLED +struct _xmlOutputBuffer { + void* context; + xmlOutputWriteCallback writecallback; + xmlOutputCloseCallback closecallback; + + xmlCharEncodingHandlerPtr encoder; /* I18N conversions to UTF-8 */ + + xmlBufPtr buffer; /* Local buffer encoded in UTF-8 or ISOLatin */ + xmlBufPtr conv; /* if encoder != NULL buffer for output */ + int written; /* total number of byte written */ + int error; +}; +#endif /* LIBXML_OUTPUT_ENABLED */ + +/** DOC_DISABLE */ +#define XML_GLOBALS_IO \ + XML_OP(xmlParserInputBufferCreateFilenameValue, \ + xmlParserInputBufferCreateFilenameFunc, XML_DEPRECATED) \ + XML_OP(xmlOutputBufferCreateFilenameValue, \ + xmlOutputBufferCreateFilenameFunc, XML_DEPRECATED) + +#define XML_OP XML_DECLARE_GLOBAL +XML_GLOBALS_IO +#undef XML_OP + +#if defined(LIBXML_THREAD_ENABLED) && !defined(XML_GLOBALS_NO_REDEFINITION) + #define xmlParserInputBufferCreateFilenameValue \ + XML_GLOBAL_MACRO(xmlParserInputBufferCreateFilenameValue) + #define xmlOutputBufferCreateFilenameValue \ + XML_GLOBAL_MACRO(xmlOutputBufferCreateFilenameValue) +#endif +/** DOC_ENABLE */ + +/* + * Interfaces for input + */ +XMLPUBFUN void + xmlCleanupInputCallbacks (void); + +XMLPUBFUN int + xmlPopInputCallbacks (void); + +XMLPUBFUN void + xmlRegisterDefaultInputCallbacks (void); +XMLPUBFUN xmlParserInputBufferPtr + xmlAllocParserInputBuffer (xmlCharEncoding enc); + +XMLPUBFUN xmlParserInputBufferPtr + xmlParserInputBufferCreateFilename (const char *URI, + xmlCharEncoding enc); +XMLPUBFUN xmlParserInputBufferPtr + xmlParserInputBufferCreateFile (FILE *file, + xmlCharEncoding enc); +XMLPUBFUN xmlParserInputBufferPtr + xmlParserInputBufferCreateFd (int fd, + xmlCharEncoding enc); +XMLPUBFUN xmlParserInputBufferPtr + xmlParserInputBufferCreateMem (const char *mem, int size, + xmlCharEncoding enc); +XMLPUBFUN xmlParserInputBufferPtr + xmlParserInputBufferCreateStatic (const char *mem, int size, + xmlCharEncoding enc); +XMLPUBFUN xmlParserInputBufferPtr + xmlParserInputBufferCreateIO (xmlInputReadCallback ioread, + xmlInputCloseCallback ioclose, + void *ioctx, + xmlCharEncoding enc); +XMLPUBFUN int + xmlParserInputBufferRead (xmlParserInputBufferPtr in, + int len); +XMLPUBFUN int + xmlParserInputBufferGrow (xmlParserInputBufferPtr in, + int len); +XMLPUBFUN int + xmlParserInputBufferPush (xmlParserInputBufferPtr in, + int len, + const char *buf); +XMLPUBFUN void + xmlFreeParserInputBuffer (xmlParserInputBufferPtr in); +XMLPUBFUN char * + xmlParserGetDirectory (const char *filename); + +XMLPUBFUN int + xmlRegisterInputCallbacks (xmlInputMatchCallback matchFunc, + xmlInputOpenCallback openFunc, + xmlInputReadCallback readFunc, + xmlInputCloseCallback closeFunc); + +xmlParserInputBufferPtr + __xmlParserInputBufferCreateFilename(const char *URI, + xmlCharEncoding enc); + +#ifdef LIBXML_OUTPUT_ENABLED +/* + * Interfaces for output + */ +XMLPUBFUN void + xmlCleanupOutputCallbacks (void); +XMLPUBFUN 
int + xmlPopOutputCallbacks (void); +XMLPUBFUN void + xmlRegisterDefaultOutputCallbacks(void); +XMLPUBFUN xmlOutputBufferPtr + xmlAllocOutputBuffer (xmlCharEncodingHandlerPtr encoder); + +XMLPUBFUN xmlOutputBufferPtr + xmlOutputBufferCreateFilename (const char *URI, + xmlCharEncodingHandlerPtr encoder, + int compression); + +XMLPUBFUN xmlOutputBufferPtr + xmlOutputBufferCreateFile (FILE *file, + xmlCharEncodingHandlerPtr encoder); + +XMLPUBFUN xmlOutputBufferPtr + xmlOutputBufferCreateBuffer (xmlBufferPtr buffer, + xmlCharEncodingHandlerPtr encoder); + +XMLPUBFUN xmlOutputBufferPtr + xmlOutputBufferCreateFd (int fd, + xmlCharEncodingHandlerPtr encoder); + +XMLPUBFUN xmlOutputBufferPtr + xmlOutputBufferCreateIO (xmlOutputWriteCallback iowrite, + xmlOutputCloseCallback ioclose, + void *ioctx, + xmlCharEncodingHandlerPtr encoder); + +/* Couple of APIs to get the output without digging into the buffers */ +XMLPUBFUN const xmlChar * + xmlOutputBufferGetContent (xmlOutputBufferPtr out); +XMLPUBFUN size_t + xmlOutputBufferGetSize (xmlOutputBufferPtr out); + +XMLPUBFUN int + xmlOutputBufferWrite (xmlOutputBufferPtr out, + int len, + const char *buf); +XMLPUBFUN int + xmlOutputBufferWriteString (xmlOutputBufferPtr out, + const char *str); +XMLPUBFUN int + xmlOutputBufferWriteEscape (xmlOutputBufferPtr out, + const xmlChar *str, + xmlCharEncodingOutputFunc escaping); + +XMLPUBFUN int + xmlOutputBufferFlush (xmlOutputBufferPtr out); +XMLPUBFUN int + xmlOutputBufferClose (xmlOutputBufferPtr out); + +XMLPUBFUN int + xmlRegisterOutputCallbacks (xmlOutputMatchCallback matchFunc, + xmlOutputOpenCallback openFunc, + xmlOutputWriteCallback writeFunc, + xmlOutputCloseCallback closeFunc); + +xmlOutputBufferPtr + __xmlOutputBufferCreateFilename(const char *URI, + xmlCharEncodingHandlerPtr encoder, + int compression); + +#ifdef LIBXML_HTTP_ENABLED +/* This function only exists if HTTP support built into the library */ +XML_DEPRECATED +XMLPUBFUN void + xmlRegisterHTTPPostCallbacks (void ); +#endif /* LIBXML_HTTP_ENABLED */ + +#endif /* LIBXML_OUTPUT_ENABLED */ + +XML_DEPRECATED +XMLPUBFUN xmlParserInputPtr + xmlCheckHTTPInput (xmlParserCtxtPtr ctxt, + xmlParserInputPtr ret); + +/* + * A predefined entity loader disabling network accesses + */ +XMLPUBFUN xmlParserInputPtr + xmlNoNetExternalEntityLoader (const char *URL, + const char *ID, + xmlParserCtxtPtr ctxt); + +XML_DEPRECATED +XMLPUBFUN xmlChar * + xmlNormalizeWindowsPath (const xmlChar *path); + +XML_DEPRECATED +XMLPUBFUN int + xmlCheckFilename (const char *path); +/** + * Default 'file://' protocol callbacks + */ +XML_DEPRECATED +XMLPUBFUN int + xmlFileMatch (const char *filename); +XML_DEPRECATED +XMLPUBFUN void * + xmlFileOpen (const char *filename); +XML_DEPRECATED +XMLPUBFUN int + xmlFileRead (void * context, + char * buffer, + int len); +XML_DEPRECATED +XMLPUBFUN int + xmlFileClose (void * context); + +/** + * Default 'http://' protocol callbacks + */ +#ifdef LIBXML_HTTP_ENABLED +XML_DEPRECATED +XMLPUBFUN int + xmlIOHTTPMatch (const char *filename); +XML_DEPRECATED +XMLPUBFUN void * + xmlIOHTTPOpen (const char *filename); +#ifdef LIBXML_OUTPUT_ENABLED +XML_DEPRECATED +XMLPUBFUN void * + xmlIOHTTPOpenW (const char * post_uri, + int compression ); +#endif /* LIBXML_OUTPUT_ENABLED */ +XML_DEPRECATED +XMLPUBFUN int + xmlIOHTTPRead (void * context, + char * buffer, + int len); +XML_DEPRECATED +XMLPUBFUN int + xmlIOHTTPClose (void * context); +#endif /* LIBXML_HTTP_ENABLED */ + +/** + * Default 'ftp://' protocol callbacks + */ +#if 
defined(LIBXML_FTP_ENABLED) +XML_DEPRECATED +XMLPUBFUN int + xmlIOFTPMatch (const char *filename); +XML_DEPRECATED +XMLPUBFUN void * + xmlIOFTPOpen (const char *filename); +XML_DEPRECATED +XMLPUBFUN int + xmlIOFTPRead (void * context, + char * buffer, + int len); +XML_DEPRECATED +XMLPUBFUN int + xmlIOFTPClose (void * context); +#endif /* defined(LIBXML_FTP_ENABLED) */ + +XMLPUBFUN xmlParserInputBufferCreateFilenameFunc + xmlParserInputBufferCreateFilenameDefault( + xmlParserInputBufferCreateFilenameFunc func); +XMLPUBFUN xmlOutputBufferCreateFilenameFunc + xmlOutputBufferCreateFilenameDefault( + xmlOutputBufferCreateFilenameFunc func); +XML_DEPRECATED +XMLPUBFUN xmlOutputBufferCreateFilenameFunc + xmlThrDefOutputBufferCreateFilenameDefault( + xmlOutputBufferCreateFilenameFunc func); +XML_DEPRECATED +XMLPUBFUN xmlParserInputBufferCreateFilenameFunc + xmlThrDefParserInputBufferCreateFilenameDefault( + xmlParserInputBufferCreateFilenameFunc func); + +#ifdef __cplusplus +} +#endif + +#endif /* __XML_IO_H__ */ diff --git a/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/include/libxml2/libxml/xmlautomata.h b/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/include/libxml2/libxml/xmlautomata.h new file mode 100644 index 000000000..ea38eb37f --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/include/libxml2/libxml/xmlautomata.h @@ -0,0 +1,146 @@ +/* + * Summary: API to build regexp automata + * Description: the API to build regexp automata + * + * Copy: See Copyright for the status of this software. + * + * Author: Daniel Veillard + */ + +#ifndef __XML_AUTOMATA_H__ +#define __XML_AUTOMATA_H__ + +#include + +#ifdef LIBXML_REGEXP_ENABLED +#ifdef LIBXML_AUTOMATA_ENABLED + +#include + +#ifdef __cplusplus +extern "C" { +#endif + +/** + * xmlAutomataPtr: + * + * A libxml automata description, It can be compiled into a regexp + */ +typedef struct _xmlAutomata xmlAutomata; +typedef xmlAutomata *xmlAutomataPtr; + +/** + * xmlAutomataStatePtr: + * + * A state int the automata description, + */ +typedef struct _xmlAutomataState xmlAutomataState; +typedef xmlAutomataState *xmlAutomataStatePtr; + +/* + * Building API + */ +XMLPUBFUN xmlAutomataPtr + xmlNewAutomata (void); +XMLPUBFUN void + xmlFreeAutomata (xmlAutomataPtr am); + +XMLPUBFUN xmlAutomataStatePtr + xmlAutomataGetInitState (xmlAutomataPtr am); +XMLPUBFUN int + xmlAutomataSetFinalState (xmlAutomataPtr am, + xmlAutomataStatePtr state); +XMLPUBFUN xmlAutomataStatePtr + xmlAutomataNewState (xmlAutomataPtr am); +XMLPUBFUN xmlAutomataStatePtr + xmlAutomataNewTransition (xmlAutomataPtr am, + xmlAutomataStatePtr from, + xmlAutomataStatePtr to, + const xmlChar *token, + void *data); +XMLPUBFUN xmlAutomataStatePtr + xmlAutomataNewTransition2 (xmlAutomataPtr am, + xmlAutomataStatePtr from, + xmlAutomataStatePtr to, + const xmlChar *token, + const xmlChar *token2, + void *data); +XMLPUBFUN xmlAutomataStatePtr + xmlAutomataNewNegTrans (xmlAutomataPtr am, + xmlAutomataStatePtr from, + xmlAutomataStatePtr to, + const xmlChar *token, + const xmlChar *token2, + void *data); + +XMLPUBFUN xmlAutomataStatePtr + xmlAutomataNewCountTrans (xmlAutomataPtr am, + xmlAutomataStatePtr from, + xmlAutomataStatePtr to, + const xmlChar *token, + int min, + int max, + void *data); +XMLPUBFUN xmlAutomataStatePtr + xmlAutomataNewCountTrans2 (xmlAutomataPtr am, + xmlAutomataStatePtr from, + xmlAutomataStatePtr to, + const xmlChar *token, + const xmlChar *token2, + int min, + int max, + 
void *data); +XMLPUBFUN xmlAutomataStatePtr + xmlAutomataNewOnceTrans (xmlAutomataPtr am, + xmlAutomataStatePtr from, + xmlAutomataStatePtr to, + const xmlChar *token, + int min, + int max, + void *data); +XMLPUBFUN xmlAutomataStatePtr + xmlAutomataNewOnceTrans2 (xmlAutomataPtr am, + xmlAutomataStatePtr from, + xmlAutomataStatePtr to, + const xmlChar *token, + const xmlChar *token2, + int min, + int max, + void *data); +XMLPUBFUN xmlAutomataStatePtr + xmlAutomataNewAllTrans (xmlAutomataPtr am, + xmlAutomataStatePtr from, + xmlAutomataStatePtr to, + int lax); +XMLPUBFUN xmlAutomataStatePtr + xmlAutomataNewEpsilon (xmlAutomataPtr am, + xmlAutomataStatePtr from, + xmlAutomataStatePtr to); +XMLPUBFUN xmlAutomataStatePtr + xmlAutomataNewCountedTrans (xmlAutomataPtr am, + xmlAutomataStatePtr from, + xmlAutomataStatePtr to, + int counter); +XMLPUBFUN xmlAutomataStatePtr + xmlAutomataNewCounterTrans (xmlAutomataPtr am, + xmlAutomataStatePtr from, + xmlAutomataStatePtr to, + int counter); +XMLPUBFUN int + xmlAutomataNewCounter (xmlAutomataPtr am, + int min, + int max); + +XMLPUBFUN struct _xmlRegexp * + xmlAutomataCompile (xmlAutomataPtr am); +XMLPUBFUN int + xmlAutomataIsDeterminist (xmlAutomataPtr am); + +#ifdef __cplusplus +} +#endif + +#endif /* LIBXML_AUTOMATA_ENABLED */ +#endif /* LIBXML_REGEXP_ENABLED */ + +#endif /* __XML_AUTOMATA_H__ */ diff --git a/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/include/libxml2/libxml/xmlerror.h b/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/include/libxml2/libxml/xmlerror.h new file mode 100644 index 000000000..36381bec5 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/include/libxml2/libxml/xmlerror.h @@ -0,0 +1,962 @@ +/* + * Summary: error handling + * Description: the API used to report errors + * + * Copy: See Copyright for the status of this software. 
+ * + * Author: Daniel Veillard + */ + +#ifndef __XML_ERROR_H__ +#define __XML_ERROR_H__ + +#include + +#ifdef __cplusplus +extern "C" { +#endif + +/** + * xmlErrorLevel: + * + * Indicates the level of an error + */ +typedef enum { + XML_ERR_NONE = 0, + XML_ERR_WARNING = 1, /* A simple warning */ + XML_ERR_ERROR = 2, /* A recoverable error */ + XML_ERR_FATAL = 3 /* A fatal error */ +} xmlErrorLevel; + +/** + * xmlErrorDomain: + * + * Indicates where an error may have come from + */ +typedef enum { + XML_FROM_NONE = 0, + XML_FROM_PARSER, /* The XML parser */ + XML_FROM_TREE, /* The tree module */ + XML_FROM_NAMESPACE, /* The XML Namespace module */ + XML_FROM_DTD, /* The XML DTD validation with parser context*/ + XML_FROM_HTML, /* The HTML parser */ + XML_FROM_MEMORY, /* The memory allocator */ + XML_FROM_OUTPUT, /* The serialization code */ + XML_FROM_IO, /* The Input/Output stack */ + XML_FROM_FTP, /* The FTP module */ + XML_FROM_HTTP, /* The HTTP module */ + XML_FROM_XINCLUDE, /* The XInclude processing */ + XML_FROM_XPATH, /* The XPath module */ + XML_FROM_XPOINTER, /* The XPointer module */ + XML_FROM_REGEXP, /* The regular expressions module */ + XML_FROM_DATATYPE, /* The W3C XML Schemas Datatype module */ + XML_FROM_SCHEMASP, /* The W3C XML Schemas parser module */ + XML_FROM_SCHEMASV, /* The W3C XML Schemas validation module */ + XML_FROM_RELAXNGP, /* The Relax-NG parser module */ + XML_FROM_RELAXNGV, /* The Relax-NG validator module */ + XML_FROM_CATALOG, /* The Catalog module */ + XML_FROM_C14N, /* The Canonicalization module */ + XML_FROM_XSLT, /* The XSLT engine from libxslt */ + XML_FROM_VALID, /* The XML DTD validation with valid context */ + XML_FROM_CHECK, /* The error checking module */ + XML_FROM_WRITER, /* The xmlwriter module */ + XML_FROM_MODULE, /* The dynamically loaded module module*/ + XML_FROM_I18N, /* The module handling character conversion */ + XML_FROM_SCHEMATRONV,/* The Schematron validator module */ + XML_FROM_BUFFER, /* The buffers module */ + XML_FROM_URI /* The URI module */ +} xmlErrorDomain; + +/** + * xmlError: + * + * An XML Error instance. + */ + +typedef struct _xmlError xmlError; +typedef xmlError *xmlErrorPtr; +struct _xmlError { + int domain; /* What part of the library raised this error */ + int code; /* The error code, e.g. 
an xmlParserError */ + char *message;/* human-readable informative error message */ + xmlErrorLevel level;/* how consequent is the error */ + char *file; /* the filename */ + int line; /* the line number if available */ + char *str1; /* extra string information */ + char *str2; /* extra string information */ + char *str3; /* extra string information */ + int int1; /* extra number information */ + int int2; /* error column # or 0 if N/A (todo: rename field when we would brk ABI) */ + void *ctxt; /* the parser context if available */ + void *node; /* the node in the tree */ +}; + +/** + * xmlParserError: + * + * This is an error that the XML (or HTML) parser can generate + */ +typedef enum { + XML_ERR_OK = 0, + XML_ERR_INTERNAL_ERROR, /* 1 */ + XML_ERR_NO_MEMORY, /* 2 */ + XML_ERR_DOCUMENT_START, /* 3 */ + XML_ERR_DOCUMENT_EMPTY, /* 4 */ + XML_ERR_DOCUMENT_END, /* 5 */ + XML_ERR_INVALID_HEX_CHARREF, /* 6 */ + XML_ERR_INVALID_DEC_CHARREF, /* 7 */ + XML_ERR_INVALID_CHARREF, /* 8 */ + XML_ERR_INVALID_CHAR, /* 9 */ + XML_ERR_CHARREF_AT_EOF, /* 10 */ + XML_ERR_CHARREF_IN_PROLOG, /* 11 */ + XML_ERR_CHARREF_IN_EPILOG, /* 12 */ + XML_ERR_CHARREF_IN_DTD, /* 13 */ + XML_ERR_ENTITYREF_AT_EOF, /* 14 */ + XML_ERR_ENTITYREF_IN_PROLOG, /* 15 */ + XML_ERR_ENTITYREF_IN_EPILOG, /* 16 */ + XML_ERR_ENTITYREF_IN_DTD, /* 17 */ + XML_ERR_PEREF_AT_EOF, /* 18 */ + XML_ERR_PEREF_IN_PROLOG, /* 19 */ + XML_ERR_PEREF_IN_EPILOG, /* 20 */ + XML_ERR_PEREF_IN_INT_SUBSET, /* 21 */ + XML_ERR_ENTITYREF_NO_NAME, /* 22 */ + XML_ERR_ENTITYREF_SEMICOL_MISSING, /* 23 */ + XML_ERR_PEREF_NO_NAME, /* 24 */ + XML_ERR_PEREF_SEMICOL_MISSING, /* 25 */ + XML_ERR_UNDECLARED_ENTITY, /* 26 */ + XML_WAR_UNDECLARED_ENTITY, /* 27 */ + XML_ERR_UNPARSED_ENTITY, /* 28 */ + XML_ERR_ENTITY_IS_EXTERNAL, /* 29 */ + XML_ERR_ENTITY_IS_PARAMETER, /* 30 */ + XML_ERR_UNKNOWN_ENCODING, /* 31 */ + XML_ERR_UNSUPPORTED_ENCODING, /* 32 */ + XML_ERR_STRING_NOT_STARTED, /* 33 */ + XML_ERR_STRING_NOT_CLOSED, /* 34 */ + XML_ERR_NS_DECL_ERROR, /* 35 */ + XML_ERR_ENTITY_NOT_STARTED, /* 36 */ + XML_ERR_ENTITY_NOT_FINISHED, /* 37 */ + XML_ERR_LT_IN_ATTRIBUTE, /* 38 */ + XML_ERR_ATTRIBUTE_NOT_STARTED, /* 39 */ + XML_ERR_ATTRIBUTE_NOT_FINISHED, /* 40 */ + XML_ERR_ATTRIBUTE_WITHOUT_VALUE, /* 41 */ + XML_ERR_ATTRIBUTE_REDEFINED, /* 42 */ + XML_ERR_LITERAL_NOT_STARTED, /* 43 */ + XML_ERR_LITERAL_NOT_FINISHED, /* 44 */ + XML_ERR_COMMENT_NOT_FINISHED, /* 45 */ + XML_ERR_PI_NOT_STARTED, /* 46 */ + XML_ERR_PI_NOT_FINISHED, /* 47 */ + XML_ERR_NOTATION_NOT_STARTED, /* 48 */ + XML_ERR_NOTATION_NOT_FINISHED, /* 49 */ + XML_ERR_ATTLIST_NOT_STARTED, /* 50 */ + XML_ERR_ATTLIST_NOT_FINISHED, /* 51 */ + XML_ERR_MIXED_NOT_STARTED, /* 52 */ + XML_ERR_MIXED_NOT_FINISHED, /* 53 */ + XML_ERR_ELEMCONTENT_NOT_STARTED, /* 54 */ + XML_ERR_ELEMCONTENT_NOT_FINISHED, /* 55 */ + XML_ERR_XMLDECL_NOT_STARTED, /* 56 */ + XML_ERR_XMLDECL_NOT_FINISHED, /* 57 */ + XML_ERR_CONDSEC_NOT_STARTED, /* 58 */ + XML_ERR_CONDSEC_NOT_FINISHED, /* 59 */ + XML_ERR_EXT_SUBSET_NOT_FINISHED, /* 60 */ + XML_ERR_DOCTYPE_NOT_FINISHED, /* 61 */ + XML_ERR_MISPLACED_CDATA_END, /* 62 */ + XML_ERR_CDATA_NOT_FINISHED, /* 63 */ + XML_ERR_RESERVED_XML_NAME, /* 64 */ + XML_ERR_SPACE_REQUIRED, /* 65 */ + XML_ERR_SEPARATOR_REQUIRED, /* 66 */ + XML_ERR_NMTOKEN_REQUIRED, /* 67 */ + XML_ERR_NAME_REQUIRED, /* 68 */ + XML_ERR_PCDATA_REQUIRED, /* 69 */ + XML_ERR_URI_REQUIRED, /* 70 */ + XML_ERR_PUBID_REQUIRED, /* 71 */ + XML_ERR_LT_REQUIRED, /* 72 */ + XML_ERR_GT_REQUIRED, /* 73 */ + XML_ERR_LTSLASH_REQUIRED, /* 74 */ + 
XML_ERR_EQUAL_REQUIRED, /* 75 */ + XML_ERR_TAG_NAME_MISMATCH, /* 76 */ + XML_ERR_TAG_NOT_FINISHED, /* 77 */ + XML_ERR_STANDALONE_VALUE, /* 78 */ + XML_ERR_ENCODING_NAME, /* 79 */ + XML_ERR_HYPHEN_IN_COMMENT, /* 80 */ + XML_ERR_INVALID_ENCODING, /* 81 */ + XML_ERR_EXT_ENTITY_STANDALONE, /* 82 */ + XML_ERR_CONDSEC_INVALID, /* 83 */ + XML_ERR_VALUE_REQUIRED, /* 84 */ + XML_ERR_NOT_WELL_BALANCED, /* 85 */ + XML_ERR_EXTRA_CONTENT, /* 86 */ + XML_ERR_ENTITY_CHAR_ERROR, /* 87 */ + XML_ERR_ENTITY_PE_INTERNAL, /* 88 */ + XML_ERR_ENTITY_LOOP, /* 89 */ + XML_ERR_ENTITY_BOUNDARY, /* 90 */ + XML_ERR_INVALID_URI, /* 91 */ + XML_ERR_URI_FRAGMENT, /* 92 */ + XML_WAR_CATALOG_PI, /* 93 */ + XML_ERR_NO_DTD, /* 94 */ + XML_ERR_CONDSEC_INVALID_KEYWORD, /* 95 */ + XML_ERR_VERSION_MISSING, /* 96 */ + XML_WAR_UNKNOWN_VERSION, /* 97 */ + XML_WAR_LANG_VALUE, /* 98 */ + XML_WAR_NS_URI, /* 99 */ + XML_WAR_NS_URI_RELATIVE, /* 100 */ + XML_ERR_MISSING_ENCODING, /* 101 */ + XML_WAR_SPACE_VALUE, /* 102 */ + XML_ERR_NOT_STANDALONE, /* 103 */ + XML_ERR_ENTITY_PROCESSING, /* 104 */ + XML_ERR_NOTATION_PROCESSING, /* 105 */ + XML_WAR_NS_COLUMN, /* 106 */ + XML_WAR_ENTITY_REDEFINED, /* 107 */ + XML_ERR_UNKNOWN_VERSION, /* 108 */ + XML_ERR_VERSION_MISMATCH, /* 109 */ + XML_ERR_NAME_TOO_LONG, /* 110 */ + XML_ERR_USER_STOP, /* 111 */ + XML_ERR_COMMENT_ABRUPTLY_ENDED, /* 112 */ + XML_WAR_ENCODING_MISMATCH, /* 113 */ + XML_ERR_RESOURCE_LIMIT, /* 114 */ + XML_ERR_ARGUMENT, /* 115 */ + XML_ERR_SYSTEM, /* 116 */ + XML_ERR_REDECL_PREDEF_ENTITY, /* 117 */ + XML_ERR_INT_SUBSET_NOT_FINISHED, /* 118 */ + XML_NS_ERR_XML_NAMESPACE = 200, + XML_NS_ERR_UNDEFINED_NAMESPACE, /* 201 */ + XML_NS_ERR_QNAME, /* 202 */ + XML_NS_ERR_ATTRIBUTE_REDEFINED, /* 203 */ + XML_NS_ERR_EMPTY, /* 204 */ + XML_NS_ERR_COLON, /* 205 */ + XML_DTD_ATTRIBUTE_DEFAULT = 500, + XML_DTD_ATTRIBUTE_REDEFINED, /* 501 */ + XML_DTD_ATTRIBUTE_VALUE, /* 502 */ + XML_DTD_CONTENT_ERROR, /* 503 */ + XML_DTD_CONTENT_MODEL, /* 504 */ + XML_DTD_CONTENT_NOT_DETERMINIST, /* 505 */ + XML_DTD_DIFFERENT_PREFIX, /* 506 */ + XML_DTD_ELEM_DEFAULT_NAMESPACE, /* 507 */ + XML_DTD_ELEM_NAMESPACE, /* 508 */ + XML_DTD_ELEM_REDEFINED, /* 509 */ + XML_DTD_EMPTY_NOTATION, /* 510 */ + XML_DTD_ENTITY_TYPE, /* 511 */ + XML_DTD_ID_FIXED, /* 512 */ + XML_DTD_ID_REDEFINED, /* 513 */ + XML_DTD_ID_SUBSET, /* 514 */ + XML_DTD_INVALID_CHILD, /* 515 */ + XML_DTD_INVALID_DEFAULT, /* 516 */ + XML_DTD_LOAD_ERROR, /* 517 */ + XML_DTD_MISSING_ATTRIBUTE, /* 518 */ + XML_DTD_MIXED_CORRUPT, /* 519 */ + XML_DTD_MULTIPLE_ID, /* 520 */ + XML_DTD_NO_DOC, /* 521 */ + XML_DTD_NO_DTD, /* 522 */ + XML_DTD_NO_ELEM_NAME, /* 523 */ + XML_DTD_NO_PREFIX, /* 524 */ + XML_DTD_NO_ROOT, /* 525 */ + XML_DTD_NOTATION_REDEFINED, /* 526 */ + XML_DTD_NOTATION_VALUE, /* 527 */ + XML_DTD_NOT_EMPTY, /* 528 */ + XML_DTD_NOT_PCDATA, /* 529 */ + XML_DTD_NOT_STANDALONE, /* 530 */ + XML_DTD_ROOT_NAME, /* 531 */ + XML_DTD_STANDALONE_WHITE_SPACE, /* 532 */ + XML_DTD_UNKNOWN_ATTRIBUTE, /* 533 */ + XML_DTD_UNKNOWN_ELEM, /* 534 */ + XML_DTD_UNKNOWN_ENTITY, /* 535 */ + XML_DTD_UNKNOWN_ID, /* 536 */ + XML_DTD_UNKNOWN_NOTATION, /* 537 */ + XML_DTD_STANDALONE_DEFAULTED, /* 538 */ + XML_DTD_XMLID_VALUE, /* 539 */ + XML_DTD_XMLID_TYPE, /* 540 */ + XML_DTD_DUP_TOKEN, /* 541 */ + XML_HTML_STRUCURE_ERROR = 800, + XML_HTML_UNKNOWN_TAG, /* 801 */ + XML_HTML_INCORRECTLY_OPENED_COMMENT, /* 802 */ + XML_RNGP_ANYNAME_ATTR_ANCESTOR = 1000, + XML_RNGP_ATTR_CONFLICT, /* 1001 */ + XML_RNGP_ATTRIBUTE_CHILDREN, /* 1002 */ + XML_RNGP_ATTRIBUTE_CONTENT, /* 1003 */ + 
XML_RNGP_ATTRIBUTE_EMPTY, /* 1004 */ + XML_RNGP_ATTRIBUTE_NOOP, /* 1005 */ + XML_RNGP_CHOICE_CONTENT, /* 1006 */ + XML_RNGP_CHOICE_EMPTY, /* 1007 */ + XML_RNGP_CREATE_FAILURE, /* 1008 */ + XML_RNGP_DATA_CONTENT, /* 1009 */ + XML_RNGP_DEF_CHOICE_AND_INTERLEAVE, /* 1010 */ + XML_RNGP_DEFINE_CREATE_FAILED, /* 1011 */ + XML_RNGP_DEFINE_EMPTY, /* 1012 */ + XML_RNGP_DEFINE_MISSING, /* 1013 */ + XML_RNGP_DEFINE_NAME_MISSING, /* 1014 */ + XML_RNGP_ELEM_CONTENT_EMPTY, /* 1015 */ + XML_RNGP_ELEM_CONTENT_ERROR, /* 1016 */ + XML_RNGP_ELEMENT_EMPTY, /* 1017 */ + XML_RNGP_ELEMENT_CONTENT, /* 1018 */ + XML_RNGP_ELEMENT_NAME, /* 1019 */ + XML_RNGP_ELEMENT_NO_CONTENT, /* 1020 */ + XML_RNGP_ELEM_TEXT_CONFLICT, /* 1021 */ + XML_RNGP_EMPTY, /* 1022 */ + XML_RNGP_EMPTY_CONSTRUCT, /* 1023 */ + XML_RNGP_EMPTY_CONTENT, /* 1024 */ + XML_RNGP_EMPTY_NOT_EMPTY, /* 1025 */ + XML_RNGP_ERROR_TYPE_LIB, /* 1026 */ + XML_RNGP_EXCEPT_EMPTY, /* 1027 */ + XML_RNGP_EXCEPT_MISSING, /* 1028 */ + XML_RNGP_EXCEPT_MULTIPLE, /* 1029 */ + XML_RNGP_EXCEPT_NO_CONTENT, /* 1030 */ + XML_RNGP_EXTERNALREF_EMTPY, /* 1031 */ + XML_RNGP_EXTERNAL_REF_FAILURE, /* 1032 */ + XML_RNGP_EXTERNALREF_RECURSE, /* 1033 */ + XML_RNGP_FORBIDDEN_ATTRIBUTE, /* 1034 */ + XML_RNGP_FOREIGN_ELEMENT, /* 1035 */ + XML_RNGP_GRAMMAR_CONTENT, /* 1036 */ + XML_RNGP_GRAMMAR_EMPTY, /* 1037 */ + XML_RNGP_GRAMMAR_MISSING, /* 1038 */ + XML_RNGP_GRAMMAR_NO_START, /* 1039 */ + XML_RNGP_GROUP_ATTR_CONFLICT, /* 1040 */ + XML_RNGP_HREF_ERROR, /* 1041 */ + XML_RNGP_INCLUDE_EMPTY, /* 1042 */ + XML_RNGP_INCLUDE_FAILURE, /* 1043 */ + XML_RNGP_INCLUDE_RECURSE, /* 1044 */ + XML_RNGP_INTERLEAVE_ADD, /* 1045 */ + XML_RNGP_INTERLEAVE_CREATE_FAILED, /* 1046 */ + XML_RNGP_INTERLEAVE_EMPTY, /* 1047 */ + XML_RNGP_INTERLEAVE_NO_CONTENT, /* 1048 */ + XML_RNGP_INVALID_DEFINE_NAME, /* 1049 */ + XML_RNGP_INVALID_URI, /* 1050 */ + XML_RNGP_INVALID_VALUE, /* 1051 */ + XML_RNGP_MISSING_HREF, /* 1052 */ + XML_RNGP_NAME_MISSING, /* 1053 */ + XML_RNGP_NEED_COMBINE, /* 1054 */ + XML_RNGP_NOTALLOWED_NOT_EMPTY, /* 1055 */ + XML_RNGP_NSNAME_ATTR_ANCESTOR, /* 1056 */ + XML_RNGP_NSNAME_NO_NS, /* 1057 */ + XML_RNGP_PARAM_FORBIDDEN, /* 1058 */ + XML_RNGP_PARAM_NAME_MISSING, /* 1059 */ + XML_RNGP_PARENTREF_CREATE_FAILED, /* 1060 */ + XML_RNGP_PARENTREF_NAME_INVALID, /* 1061 */ + XML_RNGP_PARENTREF_NO_NAME, /* 1062 */ + XML_RNGP_PARENTREF_NO_PARENT, /* 1063 */ + XML_RNGP_PARENTREF_NOT_EMPTY, /* 1064 */ + XML_RNGP_PARSE_ERROR, /* 1065 */ + XML_RNGP_PAT_ANYNAME_EXCEPT_ANYNAME, /* 1066 */ + XML_RNGP_PAT_ATTR_ATTR, /* 1067 */ + XML_RNGP_PAT_ATTR_ELEM, /* 1068 */ + XML_RNGP_PAT_DATA_EXCEPT_ATTR, /* 1069 */ + XML_RNGP_PAT_DATA_EXCEPT_ELEM, /* 1070 */ + XML_RNGP_PAT_DATA_EXCEPT_EMPTY, /* 1071 */ + XML_RNGP_PAT_DATA_EXCEPT_GROUP, /* 1072 */ + XML_RNGP_PAT_DATA_EXCEPT_INTERLEAVE, /* 1073 */ + XML_RNGP_PAT_DATA_EXCEPT_LIST, /* 1074 */ + XML_RNGP_PAT_DATA_EXCEPT_ONEMORE, /* 1075 */ + XML_RNGP_PAT_DATA_EXCEPT_REF, /* 1076 */ + XML_RNGP_PAT_DATA_EXCEPT_TEXT, /* 1077 */ + XML_RNGP_PAT_LIST_ATTR, /* 1078 */ + XML_RNGP_PAT_LIST_ELEM, /* 1079 */ + XML_RNGP_PAT_LIST_INTERLEAVE, /* 1080 */ + XML_RNGP_PAT_LIST_LIST, /* 1081 */ + XML_RNGP_PAT_LIST_REF, /* 1082 */ + XML_RNGP_PAT_LIST_TEXT, /* 1083 */ + XML_RNGP_PAT_NSNAME_EXCEPT_ANYNAME, /* 1084 */ + XML_RNGP_PAT_NSNAME_EXCEPT_NSNAME, /* 1085 */ + XML_RNGP_PAT_ONEMORE_GROUP_ATTR, /* 1086 */ + XML_RNGP_PAT_ONEMORE_INTERLEAVE_ATTR, /* 1087 */ + XML_RNGP_PAT_START_ATTR, /* 1088 */ + XML_RNGP_PAT_START_DATA, /* 1089 */ + XML_RNGP_PAT_START_EMPTY, /* 1090 */ + XML_RNGP_PAT_START_GROUP, 
/* 1091 */ + XML_RNGP_PAT_START_INTERLEAVE, /* 1092 */ + XML_RNGP_PAT_START_LIST, /* 1093 */ + XML_RNGP_PAT_START_ONEMORE, /* 1094 */ + XML_RNGP_PAT_START_TEXT, /* 1095 */ + XML_RNGP_PAT_START_VALUE, /* 1096 */ + XML_RNGP_PREFIX_UNDEFINED, /* 1097 */ + XML_RNGP_REF_CREATE_FAILED, /* 1098 */ + XML_RNGP_REF_CYCLE, /* 1099 */ + XML_RNGP_REF_NAME_INVALID, /* 1100 */ + XML_RNGP_REF_NO_DEF, /* 1101 */ + XML_RNGP_REF_NO_NAME, /* 1102 */ + XML_RNGP_REF_NOT_EMPTY, /* 1103 */ + XML_RNGP_START_CHOICE_AND_INTERLEAVE, /* 1104 */ + XML_RNGP_START_CONTENT, /* 1105 */ + XML_RNGP_START_EMPTY, /* 1106 */ + XML_RNGP_START_MISSING, /* 1107 */ + XML_RNGP_TEXT_EXPECTED, /* 1108 */ + XML_RNGP_TEXT_HAS_CHILD, /* 1109 */ + XML_RNGP_TYPE_MISSING, /* 1110 */ + XML_RNGP_TYPE_NOT_FOUND, /* 1111 */ + XML_RNGP_TYPE_VALUE, /* 1112 */ + XML_RNGP_UNKNOWN_ATTRIBUTE, /* 1113 */ + XML_RNGP_UNKNOWN_COMBINE, /* 1114 */ + XML_RNGP_UNKNOWN_CONSTRUCT, /* 1115 */ + XML_RNGP_UNKNOWN_TYPE_LIB, /* 1116 */ + XML_RNGP_URI_FRAGMENT, /* 1117 */ + XML_RNGP_URI_NOT_ABSOLUTE, /* 1118 */ + XML_RNGP_VALUE_EMPTY, /* 1119 */ + XML_RNGP_VALUE_NO_CONTENT, /* 1120 */ + XML_RNGP_XMLNS_NAME, /* 1121 */ + XML_RNGP_XML_NS, /* 1122 */ + XML_XPATH_EXPRESSION_OK = 1200, + XML_XPATH_NUMBER_ERROR, /* 1201 */ + XML_XPATH_UNFINISHED_LITERAL_ERROR, /* 1202 */ + XML_XPATH_START_LITERAL_ERROR, /* 1203 */ + XML_XPATH_VARIABLE_REF_ERROR, /* 1204 */ + XML_XPATH_UNDEF_VARIABLE_ERROR, /* 1205 */ + XML_XPATH_INVALID_PREDICATE_ERROR, /* 1206 */ + XML_XPATH_EXPR_ERROR, /* 1207 */ + XML_XPATH_UNCLOSED_ERROR, /* 1208 */ + XML_XPATH_UNKNOWN_FUNC_ERROR, /* 1209 */ + XML_XPATH_INVALID_OPERAND, /* 1210 */ + XML_XPATH_INVALID_TYPE, /* 1211 */ + XML_XPATH_INVALID_ARITY, /* 1212 */ + XML_XPATH_INVALID_CTXT_SIZE, /* 1213 */ + XML_XPATH_INVALID_CTXT_POSITION, /* 1214 */ + XML_XPATH_MEMORY_ERROR, /* 1215 */ + XML_XPTR_SYNTAX_ERROR, /* 1216 */ + XML_XPTR_RESOURCE_ERROR, /* 1217 */ + XML_XPTR_SUB_RESOURCE_ERROR, /* 1218 */ + XML_XPATH_UNDEF_PREFIX_ERROR, /* 1219 */ + XML_XPATH_ENCODING_ERROR, /* 1220 */ + XML_XPATH_INVALID_CHAR_ERROR, /* 1221 */ + XML_TREE_INVALID_HEX = 1300, + XML_TREE_INVALID_DEC, /* 1301 */ + XML_TREE_UNTERMINATED_ENTITY, /* 1302 */ + XML_TREE_NOT_UTF8, /* 1303 */ + XML_SAVE_NOT_UTF8 = 1400, + XML_SAVE_CHAR_INVALID, /* 1401 */ + XML_SAVE_NO_DOCTYPE, /* 1402 */ + XML_SAVE_UNKNOWN_ENCODING, /* 1403 */ + XML_REGEXP_COMPILE_ERROR = 1450, + XML_IO_UNKNOWN = 1500, + XML_IO_EACCES, /* 1501 */ + XML_IO_EAGAIN, /* 1502 */ + XML_IO_EBADF, /* 1503 */ + XML_IO_EBADMSG, /* 1504 */ + XML_IO_EBUSY, /* 1505 */ + XML_IO_ECANCELED, /* 1506 */ + XML_IO_ECHILD, /* 1507 */ + XML_IO_EDEADLK, /* 1508 */ + XML_IO_EDOM, /* 1509 */ + XML_IO_EEXIST, /* 1510 */ + XML_IO_EFAULT, /* 1511 */ + XML_IO_EFBIG, /* 1512 */ + XML_IO_EINPROGRESS, /* 1513 */ + XML_IO_EINTR, /* 1514 */ + XML_IO_EINVAL, /* 1515 */ + XML_IO_EIO, /* 1516 */ + XML_IO_EISDIR, /* 1517 */ + XML_IO_EMFILE, /* 1518 */ + XML_IO_EMLINK, /* 1519 */ + XML_IO_EMSGSIZE, /* 1520 */ + XML_IO_ENAMETOOLONG, /* 1521 */ + XML_IO_ENFILE, /* 1522 */ + XML_IO_ENODEV, /* 1523 */ + XML_IO_ENOENT, /* 1524 */ + XML_IO_ENOEXEC, /* 1525 */ + XML_IO_ENOLCK, /* 1526 */ + XML_IO_ENOMEM, /* 1527 */ + XML_IO_ENOSPC, /* 1528 */ + XML_IO_ENOSYS, /* 1529 */ + XML_IO_ENOTDIR, /* 1530 */ + XML_IO_ENOTEMPTY, /* 1531 */ + XML_IO_ENOTSUP, /* 1532 */ + XML_IO_ENOTTY, /* 1533 */ + XML_IO_ENXIO, /* 1534 */ + XML_IO_EPERM, /* 1535 */ + XML_IO_EPIPE, /* 1536 */ + XML_IO_ERANGE, /* 1537 */ + XML_IO_EROFS, /* 1538 */ + XML_IO_ESPIPE, /* 1539 */ + XML_IO_ESRCH, /* 1540 
*/ + XML_IO_ETIMEDOUT, /* 1541 */ + XML_IO_EXDEV, /* 1542 */ + XML_IO_NETWORK_ATTEMPT, /* 1543 */ + XML_IO_ENCODER, /* 1544 */ + XML_IO_FLUSH, /* 1545 */ + XML_IO_WRITE, /* 1546 */ + XML_IO_NO_INPUT, /* 1547 */ + XML_IO_BUFFER_FULL, /* 1548 */ + XML_IO_LOAD_ERROR, /* 1549 */ + XML_IO_ENOTSOCK, /* 1550 */ + XML_IO_EISCONN, /* 1551 */ + XML_IO_ECONNREFUSED, /* 1552 */ + XML_IO_ENETUNREACH, /* 1553 */ + XML_IO_EADDRINUSE, /* 1554 */ + XML_IO_EALREADY, /* 1555 */ + XML_IO_EAFNOSUPPORT, /* 1556 */ + XML_IO_UNSUPPORTED_PROTOCOL, /* 1557 */ + XML_XINCLUDE_RECURSION=1600, + XML_XINCLUDE_PARSE_VALUE, /* 1601 */ + XML_XINCLUDE_ENTITY_DEF_MISMATCH, /* 1602 */ + XML_XINCLUDE_NO_HREF, /* 1603 */ + XML_XINCLUDE_NO_FALLBACK, /* 1604 */ + XML_XINCLUDE_HREF_URI, /* 1605 */ + XML_XINCLUDE_TEXT_FRAGMENT, /* 1606 */ + XML_XINCLUDE_TEXT_DOCUMENT, /* 1607 */ + XML_XINCLUDE_INVALID_CHAR, /* 1608 */ + XML_XINCLUDE_BUILD_FAILED, /* 1609 */ + XML_XINCLUDE_UNKNOWN_ENCODING, /* 1610 */ + XML_XINCLUDE_MULTIPLE_ROOT, /* 1611 */ + XML_XINCLUDE_XPTR_FAILED, /* 1612 */ + XML_XINCLUDE_XPTR_RESULT, /* 1613 */ + XML_XINCLUDE_INCLUDE_IN_INCLUDE, /* 1614 */ + XML_XINCLUDE_FALLBACKS_IN_INCLUDE, /* 1615 */ + XML_XINCLUDE_FALLBACK_NOT_IN_INCLUDE, /* 1616 */ + XML_XINCLUDE_DEPRECATED_NS, /* 1617 */ + XML_XINCLUDE_FRAGMENT_ID, /* 1618 */ + XML_CATALOG_MISSING_ATTR = 1650, + XML_CATALOG_ENTRY_BROKEN, /* 1651 */ + XML_CATALOG_PREFER_VALUE, /* 1652 */ + XML_CATALOG_NOT_CATALOG, /* 1653 */ + XML_CATALOG_RECURSION, /* 1654 */ + XML_SCHEMAP_PREFIX_UNDEFINED = 1700, + XML_SCHEMAP_ATTRFORMDEFAULT_VALUE, /* 1701 */ + XML_SCHEMAP_ATTRGRP_NONAME_NOREF, /* 1702 */ + XML_SCHEMAP_ATTR_NONAME_NOREF, /* 1703 */ + XML_SCHEMAP_COMPLEXTYPE_NONAME_NOREF, /* 1704 */ + XML_SCHEMAP_ELEMFORMDEFAULT_VALUE, /* 1705 */ + XML_SCHEMAP_ELEM_NONAME_NOREF, /* 1706 */ + XML_SCHEMAP_EXTENSION_NO_BASE, /* 1707 */ + XML_SCHEMAP_FACET_NO_VALUE, /* 1708 */ + XML_SCHEMAP_FAILED_BUILD_IMPORT, /* 1709 */ + XML_SCHEMAP_GROUP_NONAME_NOREF, /* 1710 */ + XML_SCHEMAP_IMPORT_NAMESPACE_NOT_URI, /* 1711 */ + XML_SCHEMAP_IMPORT_REDEFINE_NSNAME, /* 1712 */ + XML_SCHEMAP_IMPORT_SCHEMA_NOT_URI, /* 1713 */ + XML_SCHEMAP_INVALID_BOOLEAN, /* 1714 */ + XML_SCHEMAP_INVALID_ENUM, /* 1715 */ + XML_SCHEMAP_INVALID_FACET, /* 1716 */ + XML_SCHEMAP_INVALID_FACET_VALUE, /* 1717 */ + XML_SCHEMAP_INVALID_MAXOCCURS, /* 1718 */ + XML_SCHEMAP_INVALID_MINOCCURS, /* 1719 */ + XML_SCHEMAP_INVALID_REF_AND_SUBTYPE, /* 1720 */ + XML_SCHEMAP_INVALID_WHITE_SPACE, /* 1721 */ + XML_SCHEMAP_NOATTR_NOREF, /* 1722 */ + XML_SCHEMAP_NOTATION_NO_NAME, /* 1723 */ + XML_SCHEMAP_NOTYPE_NOREF, /* 1724 */ + XML_SCHEMAP_REF_AND_SUBTYPE, /* 1725 */ + XML_SCHEMAP_RESTRICTION_NONAME_NOREF, /* 1726 */ + XML_SCHEMAP_SIMPLETYPE_NONAME, /* 1727 */ + XML_SCHEMAP_TYPE_AND_SUBTYPE, /* 1728 */ + XML_SCHEMAP_UNKNOWN_ALL_CHILD, /* 1729 */ + XML_SCHEMAP_UNKNOWN_ANYATTRIBUTE_CHILD, /* 1730 */ + XML_SCHEMAP_UNKNOWN_ATTR_CHILD, /* 1731 */ + XML_SCHEMAP_UNKNOWN_ATTRGRP_CHILD, /* 1732 */ + XML_SCHEMAP_UNKNOWN_ATTRIBUTE_GROUP, /* 1733 */ + XML_SCHEMAP_UNKNOWN_BASE_TYPE, /* 1734 */ + XML_SCHEMAP_UNKNOWN_CHOICE_CHILD, /* 1735 */ + XML_SCHEMAP_UNKNOWN_COMPLEXCONTENT_CHILD, /* 1736 */ + XML_SCHEMAP_UNKNOWN_COMPLEXTYPE_CHILD, /* 1737 */ + XML_SCHEMAP_UNKNOWN_ELEM_CHILD, /* 1738 */ + XML_SCHEMAP_UNKNOWN_EXTENSION_CHILD, /* 1739 */ + XML_SCHEMAP_UNKNOWN_FACET_CHILD, /* 1740 */ + XML_SCHEMAP_UNKNOWN_FACET_TYPE, /* 1741 */ + XML_SCHEMAP_UNKNOWN_GROUP_CHILD, /* 1742 */ + XML_SCHEMAP_UNKNOWN_IMPORT_CHILD, /* 1743 */ + XML_SCHEMAP_UNKNOWN_LIST_CHILD, /* 
1744 */ + XML_SCHEMAP_UNKNOWN_NOTATION_CHILD, /* 1745 */ + XML_SCHEMAP_UNKNOWN_PROCESSCONTENT_CHILD, /* 1746 */ + XML_SCHEMAP_UNKNOWN_REF, /* 1747 */ + XML_SCHEMAP_UNKNOWN_RESTRICTION_CHILD, /* 1748 */ + XML_SCHEMAP_UNKNOWN_SCHEMAS_CHILD, /* 1749 */ + XML_SCHEMAP_UNKNOWN_SEQUENCE_CHILD, /* 1750 */ + XML_SCHEMAP_UNKNOWN_SIMPLECONTENT_CHILD, /* 1751 */ + XML_SCHEMAP_UNKNOWN_SIMPLETYPE_CHILD, /* 1752 */ + XML_SCHEMAP_UNKNOWN_TYPE, /* 1753 */ + XML_SCHEMAP_UNKNOWN_UNION_CHILD, /* 1754 */ + XML_SCHEMAP_ELEM_DEFAULT_FIXED, /* 1755 */ + XML_SCHEMAP_REGEXP_INVALID, /* 1756 */ + XML_SCHEMAP_FAILED_LOAD, /* 1757 */ + XML_SCHEMAP_NOTHING_TO_PARSE, /* 1758 */ + XML_SCHEMAP_NOROOT, /* 1759 */ + XML_SCHEMAP_REDEFINED_GROUP, /* 1760 */ + XML_SCHEMAP_REDEFINED_TYPE, /* 1761 */ + XML_SCHEMAP_REDEFINED_ELEMENT, /* 1762 */ + XML_SCHEMAP_REDEFINED_ATTRGROUP, /* 1763 */ + XML_SCHEMAP_REDEFINED_ATTR, /* 1764 */ + XML_SCHEMAP_REDEFINED_NOTATION, /* 1765 */ + XML_SCHEMAP_FAILED_PARSE, /* 1766 */ + XML_SCHEMAP_UNKNOWN_PREFIX, /* 1767 */ + XML_SCHEMAP_DEF_AND_PREFIX, /* 1768 */ + XML_SCHEMAP_UNKNOWN_INCLUDE_CHILD, /* 1769 */ + XML_SCHEMAP_INCLUDE_SCHEMA_NOT_URI, /* 1770 */ + XML_SCHEMAP_INCLUDE_SCHEMA_NO_URI, /* 1771 */ + XML_SCHEMAP_NOT_SCHEMA, /* 1772 */ + XML_SCHEMAP_UNKNOWN_MEMBER_TYPE, /* 1773 */ + XML_SCHEMAP_INVALID_ATTR_USE, /* 1774 */ + XML_SCHEMAP_RECURSIVE, /* 1775 */ + XML_SCHEMAP_SUPERNUMEROUS_LIST_ITEM_TYPE, /* 1776 */ + XML_SCHEMAP_INVALID_ATTR_COMBINATION, /* 1777 */ + XML_SCHEMAP_INVALID_ATTR_INLINE_COMBINATION, /* 1778 */ + XML_SCHEMAP_MISSING_SIMPLETYPE_CHILD, /* 1779 */ + XML_SCHEMAP_INVALID_ATTR_NAME, /* 1780 */ + XML_SCHEMAP_REF_AND_CONTENT, /* 1781 */ + XML_SCHEMAP_CT_PROPS_CORRECT_1, /* 1782 */ + XML_SCHEMAP_CT_PROPS_CORRECT_2, /* 1783 */ + XML_SCHEMAP_CT_PROPS_CORRECT_3, /* 1784 */ + XML_SCHEMAP_CT_PROPS_CORRECT_4, /* 1785 */ + XML_SCHEMAP_CT_PROPS_CORRECT_5, /* 1786 */ + XML_SCHEMAP_DERIVATION_OK_RESTRICTION_1, /* 1787 */ + XML_SCHEMAP_DERIVATION_OK_RESTRICTION_2_1_1, /* 1788 */ + XML_SCHEMAP_DERIVATION_OK_RESTRICTION_2_1_2, /* 1789 */ + XML_SCHEMAP_DERIVATION_OK_RESTRICTION_2_2, /* 1790 */ + XML_SCHEMAP_DERIVATION_OK_RESTRICTION_3, /* 1791 */ + XML_SCHEMAP_WILDCARD_INVALID_NS_MEMBER, /* 1792 */ + XML_SCHEMAP_INTERSECTION_NOT_EXPRESSIBLE, /* 1793 */ + XML_SCHEMAP_UNION_NOT_EXPRESSIBLE, /* 1794 */ + XML_SCHEMAP_SRC_IMPORT_3_1, /* 1795 */ + XML_SCHEMAP_SRC_IMPORT_3_2, /* 1796 */ + XML_SCHEMAP_DERIVATION_OK_RESTRICTION_4_1, /* 1797 */ + XML_SCHEMAP_DERIVATION_OK_RESTRICTION_4_2, /* 1798 */ + XML_SCHEMAP_DERIVATION_OK_RESTRICTION_4_3, /* 1799 */ + XML_SCHEMAP_COS_CT_EXTENDS_1_3, /* 1800 */ + XML_SCHEMAV_NOROOT = 1801, + XML_SCHEMAV_UNDECLAREDELEM, /* 1802 */ + XML_SCHEMAV_NOTTOPLEVEL, /* 1803 */ + XML_SCHEMAV_MISSING, /* 1804 */ + XML_SCHEMAV_WRONGELEM, /* 1805 */ + XML_SCHEMAV_NOTYPE, /* 1806 */ + XML_SCHEMAV_NOROLLBACK, /* 1807 */ + XML_SCHEMAV_ISABSTRACT, /* 1808 */ + XML_SCHEMAV_NOTEMPTY, /* 1809 */ + XML_SCHEMAV_ELEMCONT, /* 1810 */ + XML_SCHEMAV_HAVEDEFAULT, /* 1811 */ + XML_SCHEMAV_NOTNILLABLE, /* 1812 */ + XML_SCHEMAV_EXTRACONTENT, /* 1813 */ + XML_SCHEMAV_INVALIDATTR, /* 1814 */ + XML_SCHEMAV_INVALIDELEM, /* 1815 */ + XML_SCHEMAV_NOTDETERMINIST, /* 1816 */ + XML_SCHEMAV_CONSTRUCT, /* 1817 */ + XML_SCHEMAV_INTERNAL, /* 1818 */ + XML_SCHEMAV_NOTSIMPLE, /* 1819 */ + XML_SCHEMAV_ATTRUNKNOWN, /* 1820 */ + XML_SCHEMAV_ATTRINVALID, /* 1821 */ + XML_SCHEMAV_VALUE, /* 1822 */ + XML_SCHEMAV_FACET, /* 1823 */ + XML_SCHEMAV_CVC_DATATYPE_VALID_1_2_1, /* 1824 */ + 
XML_SCHEMAV_CVC_DATATYPE_VALID_1_2_2, /* 1825 */ + XML_SCHEMAV_CVC_DATATYPE_VALID_1_2_3, /* 1826 */ + XML_SCHEMAV_CVC_TYPE_3_1_1, /* 1827 */ + XML_SCHEMAV_CVC_TYPE_3_1_2, /* 1828 */ + XML_SCHEMAV_CVC_FACET_VALID, /* 1829 */ + XML_SCHEMAV_CVC_LENGTH_VALID, /* 1830 */ + XML_SCHEMAV_CVC_MINLENGTH_VALID, /* 1831 */ + XML_SCHEMAV_CVC_MAXLENGTH_VALID, /* 1832 */ + XML_SCHEMAV_CVC_MININCLUSIVE_VALID, /* 1833 */ + XML_SCHEMAV_CVC_MAXINCLUSIVE_VALID, /* 1834 */ + XML_SCHEMAV_CVC_MINEXCLUSIVE_VALID, /* 1835 */ + XML_SCHEMAV_CVC_MAXEXCLUSIVE_VALID, /* 1836 */ + XML_SCHEMAV_CVC_TOTALDIGITS_VALID, /* 1837 */ + XML_SCHEMAV_CVC_FRACTIONDIGITS_VALID, /* 1838 */ + XML_SCHEMAV_CVC_PATTERN_VALID, /* 1839 */ + XML_SCHEMAV_CVC_ENUMERATION_VALID, /* 1840 */ + XML_SCHEMAV_CVC_COMPLEX_TYPE_2_1, /* 1841 */ + XML_SCHEMAV_CVC_COMPLEX_TYPE_2_2, /* 1842 */ + XML_SCHEMAV_CVC_COMPLEX_TYPE_2_3, /* 1843 */ + XML_SCHEMAV_CVC_COMPLEX_TYPE_2_4, /* 1844 */ + XML_SCHEMAV_CVC_ELT_1, /* 1845 */ + XML_SCHEMAV_CVC_ELT_2, /* 1846 */ + XML_SCHEMAV_CVC_ELT_3_1, /* 1847 */ + XML_SCHEMAV_CVC_ELT_3_2_1, /* 1848 */ + XML_SCHEMAV_CVC_ELT_3_2_2, /* 1849 */ + XML_SCHEMAV_CVC_ELT_4_1, /* 1850 */ + XML_SCHEMAV_CVC_ELT_4_2, /* 1851 */ + XML_SCHEMAV_CVC_ELT_4_3, /* 1852 */ + XML_SCHEMAV_CVC_ELT_5_1_1, /* 1853 */ + XML_SCHEMAV_CVC_ELT_5_1_2, /* 1854 */ + XML_SCHEMAV_CVC_ELT_5_2_1, /* 1855 */ + XML_SCHEMAV_CVC_ELT_5_2_2_1, /* 1856 */ + XML_SCHEMAV_CVC_ELT_5_2_2_2_1, /* 1857 */ + XML_SCHEMAV_CVC_ELT_5_2_2_2_2, /* 1858 */ + XML_SCHEMAV_CVC_ELT_6, /* 1859 */ + XML_SCHEMAV_CVC_ELT_7, /* 1860 */ + XML_SCHEMAV_CVC_ATTRIBUTE_1, /* 1861 */ + XML_SCHEMAV_CVC_ATTRIBUTE_2, /* 1862 */ + XML_SCHEMAV_CVC_ATTRIBUTE_3, /* 1863 */ + XML_SCHEMAV_CVC_ATTRIBUTE_4, /* 1864 */ + XML_SCHEMAV_CVC_COMPLEX_TYPE_3_1, /* 1865 */ + XML_SCHEMAV_CVC_COMPLEX_TYPE_3_2_1, /* 1866 */ + XML_SCHEMAV_CVC_COMPLEX_TYPE_3_2_2, /* 1867 */ + XML_SCHEMAV_CVC_COMPLEX_TYPE_4, /* 1868 */ + XML_SCHEMAV_CVC_COMPLEX_TYPE_5_1, /* 1869 */ + XML_SCHEMAV_CVC_COMPLEX_TYPE_5_2, /* 1870 */ + XML_SCHEMAV_ELEMENT_CONTENT, /* 1871 */ + XML_SCHEMAV_DOCUMENT_ELEMENT_MISSING, /* 1872 */ + XML_SCHEMAV_CVC_COMPLEX_TYPE_1, /* 1873 */ + XML_SCHEMAV_CVC_AU, /* 1874 */ + XML_SCHEMAV_CVC_TYPE_1, /* 1875 */ + XML_SCHEMAV_CVC_TYPE_2, /* 1876 */ + XML_SCHEMAV_CVC_IDC, /* 1877 */ + XML_SCHEMAV_CVC_WILDCARD, /* 1878 */ + XML_SCHEMAV_MISC, /* 1879 */ + XML_XPTR_UNKNOWN_SCHEME = 1900, + XML_XPTR_CHILDSEQ_START, /* 1901 */ + XML_XPTR_EVAL_FAILED, /* 1902 */ + XML_XPTR_EXTRA_OBJECTS, /* 1903 */ + XML_C14N_CREATE_CTXT = 1950, + XML_C14N_REQUIRES_UTF8, /* 1951 */ + XML_C14N_CREATE_STACK, /* 1952 */ + XML_C14N_INVALID_NODE, /* 1953 */ + XML_C14N_UNKNOW_NODE, /* 1954 */ + XML_C14N_RELATIVE_NAMESPACE, /* 1955 */ + XML_FTP_PASV_ANSWER = 2000, + XML_FTP_EPSV_ANSWER, /* 2001 */ + XML_FTP_ACCNT, /* 2002 */ + XML_FTP_URL_SYNTAX, /* 2003 */ + XML_HTTP_URL_SYNTAX = 2020, + XML_HTTP_USE_IP, /* 2021 */ + XML_HTTP_UNKNOWN_HOST, /* 2022 */ + XML_SCHEMAP_SRC_SIMPLE_TYPE_1 = 3000, + XML_SCHEMAP_SRC_SIMPLE_TYPE_2, /* 3001 */ + XML_SCHEMAP_SRC_SIMPLE_TYPE_3, /* 3002 */ + XML_SCHEMAP_SRC_SIMPLE_TYPE_4, /* 3003 */ + XML_SCHEMAP_SRC_RESOLVE, /* 3004 */ + XML_SCHEMAP_SRC_RESTRICTION_BASE_OR_SIMPLETYPE, /* 3005 */ + XML_SCHEMAP_SRC_LIST_ITEMTYPE_OR_SIMPLETYPE, /* 3006 */ + XML_SCHEMAP_SRC_UNION_MEMBERTYPES_OR_SIMPLETYPES, /* 3007 */ + XML_SCHEMAP_ST_PROPS_CORRECT_1, /* 3008 */ + XML_SCHEMAP_ST_PROPS_CORRECT_2, /* 3009 */ + XML_SCHEMAP_ST_PROPS_CORRECT_3, /* 3010 */ + XML_SCHEMAP_COS_ST_RESTRICTS_1_1, /* 3011 */ + XML_SCHEMAP_COS_ST_RESTRICTS_1_2, 
/* 3012 */ + XML_SCHEMAP_COS_ST_RESTRICTS_1_3_1, /* 3013 */ + XML_SCHEMAP_COS_ST_RESTRICTS_1_3_2, /* 3014 */ + XML_SCHEMAP_COS_ST_RESTRICTS_2_1, /* 3015 */ + XML_SCHEMAP_COS_ST_RESTRICTS_2_3_1_1, /* 3016 */ + XML_SCHEMAP_COS_ST_RESTRICTS_2_3_1_2, /* 3017 */ + XML_SCHEMAP_COS_ST_RESTRICTS_2_3_2_1, /* 3018 */ + XML_SCHEMAP_COS_ST_RESTRICTS_2_3_2_2, /* 3019 */ + XML_SCHEMAP_COS_ST_RESTRICTS_2_3_2_3, /* 3020 */ + XML_SCHEMAP_COS_ST_RESTRICTS_2_3_2_4, /* 3021 */ + XML_SCHEMAP_COS_ST_RESTRICTS_2_3_2_5, /* 3022 */ + XML_SCHEMAP_COS_ST_RESTRICTS_3_1, /* 3023 */ + XML_SCHEMAP_COS_ST_RESTRICTS_3_3_1, /* 3024 */ + XML_SCHEMAP_COS_ST_RESTRICTS_3_3_1_2, /* 3025 */ + XML_SCHEMAP_COS_ST_RESTRICTS_3_3_2_2, /* 3026 */ + XML_SCHEMAP_COS_ST_RESTRICTS_3_3_2_1, /* 3027 */ + XML_SCHEMAP_COS_ST_RESTRICTS_3_3_2_3, /* 3028 */ + XML_SCHEMAP_COS_ST_RESTRICTS_3_3_2_4, /* 3029 */ + XML_SCHEMAP_COS_ST_RESTRICTS_3_3_2_5, /* 3030 */ + XML_SCHEMAP_COS_ST_DERIVED_OK_2_1, /* 3031 */ + XML_SCHEMAP_COS_ST_DERIVED_OK_2_2, /* 3032 */ + XML_SCHEMAP_S4S_ELEM_NOT_ALLOWED, /* 3033 */ + XML_SCHEMAP_S4S_ELEM_MISSING, /* 3034 */ + XML_SCHEMAP_S4S_ATTR_NOT_ALLOWED, /* 3035 */ + XML_SCHEMAP_S4S_ATTR_MISSING, /* 3036 */ + XML_SCHEMAP_S4S_ATTR_INVALID_VALUE, /* 3037 */ + XML_SCHEMAP_SRC_ELEMENT_1, /* 3038 */ + XML_SCHEMAP_SRC_ELEMENT_2_1, /* 3039 */ + XML_SCHEMAP_SRC_ELEMENT_2_2, /* 3040 */ + XML_SCHEMAP_SRC_ELEMENT_3, /* 3041 */ + XML_SCHEMAP_P_PROPS_CORRECT_1, /* 3042 */ + XML_SCHEMAP_P_PROPS_CORRECT_2_1, /* 3043 */ + XML_SCHEMAP_P_PROPS_CORRECT_2_2, /* 3044 */ + XML_SCHEMAP_E_PROPS_CORRECT_2, /* 3045 */ + XML_SCHEMAP_E_PROPS_CORRECT_3, /* 3046 */ + XML_SCHEMAP_E_PROPS_CORRECT_4, /* 3047 */ + XML_SCHEMAP_E_PROPS_CORRECT_5, /* 3048 */ + XML_SCHEMAP_E_PROPS_CORRECT_6, /* 3049 */ + XML_SCHEMAP_SRC_INCLUDE, /* 3050 */ + XML_SCHEMAP_SRC_ATTRIBUTE_1, /* 3051 */ + XML_SCHEMAP_SRC_ATTRIBUTE_2, /* 3052 */ + XML_SCHEMAP_SRC_ATTRIBUTE_3_1, /* 3053 */ + XML_SCHEMAP_SRC_ATTRIBUTE_3_2, /* 3054 */ + XML_SCHEMAP_SRC_ATTRIBUTE_4, /* 3055 */ + XML_SCHEMAP_NO_XMLNS, /* 3056 */ + XML_SCHEMAP_NO_XSI, /* 3057 */ + XML_SCHEMAP_COS_VALID_DEFAULT_1, /* 3058 */ + XML_SCHEMAP_COS_VALID_DEFAULT_2_1, /* 3059 */ + XML_SCHEMAP_COS_VALID_DEFAULT_2_2_1, /* 3060 */ + XML_SCHEMAP_COS_VALID_DEFAULT_2_2_2, /* 3061 */ + XML_SCHEMAP_CVC_SIMPLE_TYPE, /* 3062 */ + XML_SCHEMAP_COS_CT_EXTENDS_1_1, /* 3063 */ + XML_SCHEMAP_SRC_IMPORT_1_1, /* 3064 */ + XML_SCHEMAP_SRC_IMPORT_1_2, /* 3065 */ + XML_SCHEMAP_SRC_IMPORT_2, /* 3066 */ + XML_SCHEMAP_SRC_IMPORT_2_1, /* 3067 */ + XML_SCHEMAP_SRC_IMPORT_2_2, /* 3068 */ + XML_SCHEMAP_INTERNAL, /* 3069 non-W3C */ + XML_SCHEMAP_NOT_DETERMINISTIC, /* 3070 non-W3C */ + XML_SCHEMAP_SRC_ATTRIBUTE_GROUP_1, /* 3071 */ + XML_SCHEMAP_SRC_ATTRIBUTE_GROUP_2, /* 3072 */ + XML_SCHEMAP_SRC_ATTRIBUTE_GROUP_3, /* 3073 */ + XML_SCHEMAP_MG_PROPS_CORRECT_1, /* 3074 */ + XML_SCHEMAP_MG_PROPS_CORRECT_2, /* 3075 */ + XML_SCHEMAP_SRC_CT_1, /* 3076 */ + XML_SCHEMAP_DERIVATION_OK_RESTRICTION_2_1_3, /* 3077 */ + XML_SCHEMAP_AU_PROPS_CORRECT_2, /* 3078 */ + XML_SCHEMAP_A_PROPS_CORRECT_2, /* 3079 */ + XML_SCHEMAP_C_PROPS_CORRECT, /* 3080 */ + XML_SCHEMAP_SRC_REDEFINE, /* 3081 */ + XML_SCHEMAP_SRC_IMPORT, /* 3082 */ + XML_SCHEMAP_WARN_SKIP_SCHEMA, /* 3083 */ + XML_SCHEMAP_WARN_UNLOCATED_SCHEMA, /* 3084 */ + XML_SCHEMAP_WARN_ATTR_REDECL_PROH, /* 3085 */ + XML_SCHEMAP_WARN_ATTR_POINTLESS_PROH, /* 3085 */ + XML_SCHEMAP_AG_PROPS_CORRECT, /* 3086 */ + XML_SCHEMAP_COS_CT_EXTENDS_1_2, /* 3087 */ + XML_SCHEMAP_AU_PROPS_CORRECT, /* 3088 */ + XML_SCHEMAP_A_PROPS_CORRECT_3, /* 3089 
*/ + XML_SCHEMAP_COS_ALL_LIMITED, /* 3090 */ + XML_SCHEMATRONV_ASSERT = 4000, /* 4000 */ + XML_SCHEMATRONV_REPORT, + XML_MODULE_OPEN = 4900, /* 4900 */ + XML_MODULE_CLOSE, /* 4901 */ + XML_CHECK_FOUND_ELEMENT = 5000, + XML_CHECK_FOUND_ATTRIBUTE, /* 5001 */ + XML_CHECK_FOUND_TEXT, /* 5002 */ + XML_CHECK_FOUND_CDATA, /* 5003 */ + XML_CHECK_FOUND_ENTITYREF, /* 5004 */ + XML_CHECK_FOUND_ENTITY, /* 5005 */ + XML_CHECK_FOUND_PI, /* 5006 */ + XML_CHECK_FOUND_COMMENT, /* 5007 */ + XML_CHECK_FOUND_DOCTYPE, /* 5008 */ + XML_CHECK_FOUND_FRAGMENT, /* 5009 */ + XML_CHECK_FOUND_NOTATION, /* 5010 */ + XML_CHECK_UNKNOWN_NODE, /* 5011 */ + XML_CHECK_ENTITY_TYPE, /* 5012 */ + XML_CHECK_NO_PARENT, /* 5013 */ + XML_CHECK_NO_DOC, /* 5014 */ + XML_CHECK_NO_NAME, /* 5015 */ + XML_CHECK_NO_ELEM, /* 5016 */ + XML_CHECK_WRONG_DOC, /* 5017 */ + XML_CHECK_NO_PREV, /* 5018 */ + XML_CHECK_WRONG_PREV, /* 5019 */ + XML_CHECK_NO_NEXT, /* 5020 */ + XML_CHECK_WRONG_NEXT, /* 5021 */ + XML_CHECK_NOT_DTD, /* 5022 */ + XML_CHECK_NOT_ATTR, /* 5023 */ + XML_CHECK_NOT_ATTR_DECL, /* 5024 */ + XML_CHECK_NOT_ELEM_DECL, /* 5025 */ + XML_CHECK_NOT_ENTITY_DECL, /* 5026 */ + XML_CHECK_NOT_NS_DECL, /* 5027 */ + XML_CHECK_NO_HREF, /* 5028 */ + XML_CHECK_WRONG_PARENT,/* 5029 */ + XML_CHECK_NS_SCOPE, /* 5030 */ + XML_CHECK_NS_ANCESTOR, /* 5031 */ + XML_CHECK_NOT_UTF8, /* 5032 */ + XML_CHECK_NO_DICT, /* 5033 */ + XML_CHECK_NOT_NCNAME, /* 5034 */ + XML_CHECK_OUTSIDE_DICT, /* 5035 */ + XML_CHECK_WRONG_NAME, /* 5036 */ + XML_CHECK_NAME_NOT_NULL, /* 5037 */ + XML_I18N_NO_NAME = 6000, + XML_I18N_NO_HANDLER, /* 6001 */ + XML_I18N_EXCESS_HANDLER, /* 6002 */ + XML_I18N_CONV_FAILED, /* 6003 */ + XML_I18N_NO_OUTPUT, /* 6004 */ + XML_BUF_OVERFLOW = 7000 +} xmlParserErrors; + +/** + * xmlGenericErrorFunc: + * @ctx: a parsing context + * @msg: the message + * @...: the extra arguments of the varargs to format the message + * + * Signature of the function to use when there is an error and + * no parsing or validity context available . + */ +typedef void (*xmlGenericErrorFunc) (void *ctx, + const char *msg, + ...) LIBXML_ATTR_FORMAT(2,3); +/** + * xmlStructuredErrorFunc: + * @userData: user provided data for the error callback + * @error: the error being raised. + * + * Signature of the function to use when there is an error and + * the module handles the new error reporting mechanism. + */ +typedef void (*xmlStructuredErrorFunc) (void *userData, const xmlError *error); + +/** DOC_DISABLE */ +#define XML_GLOBALS_ERROR \ + XML_OP(xmlLastError, xmlError, XML_DEPRECATED) \ + XML_OP(xmlGenericError, xmlGenericErrorFunc, XML_NO_ATTR) \ + XML_OP(xmlGenericErrorContext, void *, XML_NO_ATTR) \ + XML_OP(xmlStructuredError, xmlStructuredErrorFunc, XML_NO_ATTR) \ + XML_OP(xmlStructuredErrorContext, void *, XML_NO_ATTR) + +#define XML_OP XML_DECLARE_GLOBAL +XML_GLOBALS_ERROR +#undef XML_OP + +#if defined(LIBXML_THREAD_ENABLED) && !defined(XML_GLOBALS_NO_REDEFINITION) + #define xmlLastError XML_GLOBAL_MACRO(xmlLastError) + #define xmlGenericError XML_GLOBAL_MACRO(xmlGenericError) + #define xmlGenericErrorContext XML_GLOBAL_MACRO(xmlGenericErrorContext) + #define xmlStructuredError XML_GLOBAL_MACRO(xmlStructuredError) + #define xmlStructuredErrorContext XML_GLOBAL_MACRO(xmlStructuredErrorContext) +#endif +/** DOC_ENABLE */ + +/* + * Use the following function to reset the two global variables + * xmlGenericError and xmlGenericErrorContext. 
+ */ +XMLPUBFUN void + xmlSetGenericErrorFunc (void *ctx, + xmlGenericErrorFunc handler); +XML_DEPRECATED +XMLPUBFUN void + xmlThrDefSetGenericErrorFunc(void *ctx, + xmlGenericErrorFunc handler); +XML_DEPRECATED +XMLPUBFUN void + initGenericErrorDefaultFunc (xmlGenericErrorFunc *handler); + +XMLPUBFUN void + xmlSetStructuredErrorFunc (void *ctx, + xmlStructuredErrorFunc handler); +XML_DEPRECATED +XMLPUBFUN void + xmlThrDefSetStructuredErrorFunc(void *ctx, + xmlStructuredErrorFunc handler); +/* + * Default message routines used by SAX and Valid context for error + * and warning reporting. + */ +XMLPUBFUN void + xmlParserError (void *ctx, + const char *msg, + ...) LIBXML_ATTR_FORMAT(2,3); +XMLPUBFUN void + xmlParserWarning (void *ctx, + const char *msg, + ...) LIBXML_ATTR_FORMAT(2,3); +XMLPUBFUN void + xmlParserValidityError (void *ctx, + const char *msg, + ...) LIBXML_ATTR_FORMAT(2,3); +XMLPUBFUN void + xmlParserValidityWarning (void *ctx, + const char *msg, + ...) LIBXML_ATTR_FORMAT(2,3); +/** DOC_DISABLE */ +struct _xmlParserInput; +/** DOC_ENABLE */ +XMLPUBFUN void + xmlParserPrintFileInfo (struct _xmlParserInput *input); +XMLPUBFUN void + xmlParserPrintFileContext (struct _xmlParserInput *input); +XMLPUBFUN void +xmlFormatError (const xmlError *err, + xmlGenericErrorFunc channel, + void *data); + +/* + * Extended error information routines + */ +XMLPUBFUN const xmlError * + xmlGetLastError (void); +XMLPUBFUN void + xmlResetLastError (void); +XMLPUBFUN const xmlError * + xmlCtxtGetLastError (void *ctx); +XMLPUBFUN void + xmlCtxtResetLastError (void *ctx); +XMLPUBFUN void + xmlResetError (xmlErrorPtr err); +XMLPUBFUN int + xmlCopyError (const xmlError *from, + xmlErrorPtr to); + +#ifdef __cplusplus +} +#endif +#endif /* __XML_ERROR_H__ */ diff --git a/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/include/libxml2/libxml/xmlexports.h b/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/include/libxml2/libxml/xmlexports.h new file mode 100644 index 000000000..3c1d83f49 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/include/libxml2/libxml/xmlexports.h @@ -0,0 +1,146 @@ +/* + * Summary: macros for marking symbols as exportable/importable. + * Description: macros for marking symbols as exportable/importable. + * + * Copy: See Copyright for the status of this software. 
+ */ + +#ifndef __XML_EXPORTS_H__ +#define __XML_EXPORTS_H__ + +/** DOC_DISABLE */ + +/* + * Symbol visibility + */ + +#if defined(_WIN32) || defined(__CYGWIN__) + #ifdef LIBXML_STATIC + #define XMLPUBLIC + #elif defined(IN_LIBXML) + #define XMLPUBLIC __declspec(dllexport) + #else + #define XMLPUBLIC __declspec(dllimport) + #endif +#else /* not Windows */ + #define XMLPUBLIC +#endif /* platform switch */ + +#define XMLPUBFUN XMLPUBLIC + +#define XMLPUBVAR XMLPUBLIC extern + +/* Compatibility */ +#define XMLCALL +#define XMLCDECL +#ifndef LIBXML_DLL_IMPORT + #define LIBXML_DLL_IMPORT XMLPUBVAR +#endif + +/* + * Attributes + */ + +#ifndef ATTRIBUTE_UNUSED + #if __GNUC__ * 100 + __GNUC_MINOR__ >= 207 || defined(__clang__) + #define ATTRIBUTE_UNUSED __attribute__((unused)) + #else + #define ATTRIBUTE_UNUSED + #endif +#endif + +#if !defined(__clang__) && (__GNUC__ * 100 + __GNUC_MINOR__ >= 403) + #define LIBXML_ATTR_ALLOC_SIZE(x) __attribute__((alloc_size(x))) +#else + #define LIBXML_ATTR_ALLOC_SIZE(x) +#endif + +#if __GNUC__ * 100 + __GNUC_MINOR__ >= 303 + #define LIBXML_ATTR_FORMAT(fmt,args) \ + __attribute__((__format__(__printf__,fmt,args))) +#else + #define LIBXML_ATTR_FORMAT(fmt,args) +#endif + +#ifndef XML_DEPRECATED + #if defined(IN_LIBXML) + #define XML_DEPRECATED + #elif __GNUC__ * 100 + __GNUC_MINOR__ >= 301 + #define XML_DEPRECATED __attribute__((deprecated)) + #elif defined(_MSC_VER) && _MSC_VER >= 1400 + /* Available since Visual Studio 2005 */ + #define XML_DEPRECATED __declspec(deprecated) + #else + #define XML_DEPRECATED + #endif +#endif + +/* + * Warnings pragmas, should be moved from public headers + */ + +#if defined(__LCC__) + + #define XML_IGNORE_FPTR_CAST_WARNINGS + #define XML_POP_WARNINGS \ + _Pragma("diag_default 1215") + +#elif defined(__clang__) || (__GNUC__ * 100 + __GNUC_MINOR__ >= 406) + + #if defined(__clang__) || (__GNUC__ * 100 + __GNUC_MINOR__ >= 800) + #define XML_IGNORE_FPTR_CAST_WARNINGS \ + _Pragma("GCC diagnostic push") \ + _Pragma("GCC diagnostic ignored \"-Wpedantic\"") \ + _Pragma("GCC diagnostic ignored \"-Wcast-function-type\"") + #else + #define XML_IGNORE_FPTR_CAST_WARNINGS \ + _Pragma("GCC diagnostic push") \ + _Pragma("GCC diagnostic ignored \"-Wpedantic\"") + #endif + #define XML_POP_WARNINGS \ + _Pragma("GCC diagnostic pop") + +#elif defined(_MSC_VER) && _MSC_VER >= 1400 + + #define XML_IGNORE_FPTR_CAST_WARNINGS __pragma(warning(push)) + #define XML_POP_WARNINGS __pragma(warning(pop)) + +#else + + #define XML_IGNORE_FPTR_CAST_WARNINGS + #define XML_POP_WARNINGS + +#endif + +/* + * Accessors for globals + */ + +#define XML_NO_ATTR + +#ifdef LIBXML_THREAD_ENABLED + #define XML_DECLARE_GLOBAL(name, type, attrs) \ + attrs XMLPUBFUN type *__##name(void); + #define XML_GLOBAL_MACRO(name) (*__##name()) +#else + #define XML_DECLARE_GLOBAL(name, type, attrs) \ + attrs XMLPUBVAR type name; +#endif + +/* + * Originally declared in xmlversion.h which is generated + */ + +#ifdef __cplusplus +extern "C" { +#endif + +XMLPUBFUN void xmlCheckVersion(int version); + +#ifdef __cplusplus +} +#endif + +#endif /* __XML_EXPORTS_H__ */ + + diff --git a/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/include/libxml2/libxml/xmlmemory.h b/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/include/libxml2/libxml/xmlmemory.h new file mode 100644 index 000000000..1de3e9fce --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/include/libxml2/libxml/xmlmemory.h @@ -0,0 +1,188 @@ 
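The error-handling surface declared in xmlerror.h above (the xmlGenericErrorFunc / xmlStructuredErrorFunc typedefs, xmlSetStructuredErrorFunc, xmlGetLastError) is easiest to see from a small consumer program. The following is a minimal sketch only, not part of the vendored header: it assumes a program compiled against this libxml2 and linked with -lxml2 (for example via xml2-config --cflags --libs); the handler name, file name, and sample XML are invented for illustration, and the const xmlError * callback signature matches this header revision specifically.

    /* Illustrative sketch: register a structured error callback and inspect
     * the last error.  Not part of the vendored header. */
    #include <stdio.h>
    #include <string.h>
    #include <libxml/parser.h>
    #include <libxml/xmlerror.h>

    static void report(void *userData, const xmlError *err)
    {
        (void)userData;
        /* err->code is one of the xmlParserErrors values enumerated above */
        fprintf(stderr, "libxml2 error %d (line %d): %s",
                err->code, err->line,
                err->message ? err->message : "(no message)\n");
    }

    int main(void)
    {
        const char *xml = "<root><broken></root>";      /* deliberately malformed */

        xmlSetStructuredErrorFunc(NULL, report);         /* declared in xmlerror.h */

        xmlDocPtr doc = xmlReadMemory(xml, (int)strlen(xml), "inline.xml", NULL, 0);
        if (doc == NULL) {
            const xmlError *last = xmlGetLastError();    /* global last-error record */
            if (last != NULL)
                fprintf(stderr, "last error code: %d\n", last->code);
        } else {
            xmlFreeDoc(doc);
        }
        return 0;
    }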
+/* + * Summary: interface for the memory allocator + * Description: provides interfaces for the memory allocator, + * including debugging capabilities. + * + * Copy: See Copyright for the status of this software. + * + * Author: Daniel Veillard + */ + + +#ifndef __DEBUG_MEMORY_ALLOC__ +#define __DEBUG_MEMORY_ALLOC__ + +#include +#include + +#ifdef __cplusplus +extern "C" { +#endif + +/* + * The XML memory wrapper support 4 basic overloadable functions. + */ +/** + * xmlFreeFunc: + * @mem: an already allocated block of memory + * + * Signature for a free() implementation. + */ +typedef void (*xmlFreeFunc)(void *mem); +/** + * xmlMallocFunc: + * @size: the size requested in bytes + * + * Signature for a malloc() implementation. + * + * Returns a pointer to the newly allocated block or NULL in case of error. + */ +typedef void *(LIBXML_ATTR_ALLOC_SIZE(1) *xmlMallocFunc)(size_t size); + +/** + * xmlReallocFunc: + * @mem: an already allocated block of memory + * @size: the new size requested in bytes + * + * Signature for a realloc() implementation. + * + * Returns a pointer to the newly reallocated block or NULL in case of error. + */ +typedef void *(*xmlReallocFunc)(void *mem, size_t size); + +/** + * xmlStrdupFunc: + * @str: a zero terminated string + * + * Signature for an strdup() implementation. + * + * Returns the copy of the string or NULL in case of error. + */ +typedef char *(*xmlStrdupFunc)(const char *str); + +/* + * In general the memory allocation entry points are not kept + * thread specific but this can be overridden by LIBXML_THREAD_ALLOC_ENABLED + * - xmlMalloc + * - xmlMallocAtomic + * - xmlRealloc + * - xmlMemStrdup + * - xmlFree + */ +/** DOC_DISABLE */ +#ifdef LIBXML_THREAD_ALLOC_ENABLED + #define XML_GLOBALS_ALLOC \ + XML_OP(xmlMalloc, xmlMallocFunc, XML_NO_ATTR) \ + XML_OP(xmlMallocAtomic, xmlMallocFunc, XML_NO_ATTR) \ + XML_OP(xmlRealloc, xmlReallocFunc, XML_NO_ATTR) \ + XML_OP(xmlFree, xmlFreeFunc, XML_NO_ATTR) \ + XML_OP(xmlMemStrdup, xmlStrdupFunc, XML_NO_ATTR) + #define XML_OP XML_DECLARE_GLOBAL + XML_GLOBALS_ALLOC + #undef XML_OP + #if defined(LIBXML_THREAD_ENABLED) && !defined(XML_GLOBALS_NO_REDEFINITION) + #define xmlMalloc XML_GLOBAL_MACRO(xmlMalloc) + #define xmlMallocAtomic XML_GLOBAL_MACRO(xmlMallocAtomic) + #define xmlRealloc XML_GLOBAL_MACRO(xmlRealloc) + #define xmlFree XML_GLOBAL_MACRO(xmlFree) + #define xmlMemStrdup XML_GLOBAL_MACRO(xmlMemStrdup) + #endif +#else + #define XML_GLOBALS_ALLOC +/** DOC_ENABLE */ + XMLPUBVAR xmlMallocFunc xmlMalloc; + XMLPUBVAR xmlMallocFunc xmlMallocAtomic; + XMLPUBVAR xmlReallocFunc xmlRealloc; + XMLPUBVAR xmlFreeFunc xmlFree; + XMLPUBVAR xmlStrdupFunc xmlMemStrdup; +#endif + +/* + * The way to overload the existing functions. + * The xmlGc function have an extra entry for atomic block + * allocations useful for garbage collected memory allocators + */ +XMLPUBFUN int + xmlMemSetup (xmlFreeFunc freeFunc, + xmlMallocFunc mallocFunc, + xmlReallocFunc reallocFunc, + xmlStrdupFunc strdupFunc); +XMLPUBFUN int + xmlMemGet (xmlFreeFunc *freeFunc, + xmlMallocFunc *mallocFunc, + xmlReallocFunc *reallocFunc, + xmlStrdupFunc *strdupFunc); +XMLPUBFUN int + xmlGcMemSetup (xmlFreeFunc freeFunc, + xmlMallocFunc mallocFunc, + xmlMallocFunc mallocAtomicFunc, + xmlReallocFunc reallocFunc, + xmlStrdupFunc strdupFunc); +XMLPUBFUN int + xmlGcMemGet (xmlFreeFunc *freeFunc, + xmlMallocFunc *mallocFunc, + xmlMallocFunc *mallocAtomicFunc, + xmlReallocFunc *reallocFunc, + xmlStrdupFunc *strdupFunc); + +/* + * Initialization of the memory layer. 
+ */ +XML_DEPRECATED +XMLPUBFUN int + xmlInitMemory (void); + +/* + * Cleanup of the memory layer. + */ +XML_DEPRECATED +XMLPUBFUN void + xmlCleanupMemory (void); +/* + * These are specific to the XML debug memory wrapper. + */ +XMLPUBFUN size_t + xmlMemSize (void *ptr); +XMLPUBFUN int + xmlMemUsed (void); +XMLPUBFUN int + xmlMemBlocks (void); +XML_DEPRECATED +XMLPUBFUN void + xmlMemDisplay (FILE *fp); +XML_DEPRECATED +XMLPUBFUN void + xmlMemDisplayLast(FILE *fp, long nbBytes); +XML_DEPRECATED +XMLPUBFUN void + xmlMemShow (FILE *fp, int nr); +XML_DEPRECATED +XMLPUBFUN void + xmlMemoryDump (void); +XMLPUBFUN void * + xmlMemMalloc (size_t size) LIBXML_ATTR_ALLOC_SIZE(1); +XMLPUBFUN void * + xmlMemRealloc (void *ptr,size_t size); +XMLPUBFUN void + xmlMemFree (void *ptr); +XMLPUBFUN char * + xmlMemoryStrdup (const char *str); +XML_DEPRECATED +XMLPUBFUN void * + xmlMallocLoc (size_t size, const char *file, int line) LIBXML_ATTR_ALLOC_SIZE(1); +XML_DEPRECATED +XMLPUBFUN void * + xmlReallocLoc (void *ptr, size_t size, const char *file, int line); +XML_DEPRECATED +XMLPUBFUN void * + xmlMallocAtomicLoc (size_t size, const char *file, int line) LIBXML_ATTR_ALLOC_SIZE(1); +XML_DEPRECATED +XMLPUBFUN char * + xmlMemStrdupLoc (const char *str, const char *file, int line); + +#ifdef __cplusplus +} +#endif /* __cplusplus */ + +#endif /* __DEBUG_MEMORY_ALLOC__ */ + diff --git a/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/include/libxml2/libxml/xmlmodule.h b/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/include/libxml2/libxml/xmlmodule.h new file mode 100644 index 000000000..279986c1a --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/include/libxml2/libxml/xmlmodule.h @@ -0,0 +1,57 @@ +/* + * Summary: dynamic module loading + * Description: basic API for dynamic module loading, used by + * libexslt added in 2.6.17 + * + * Copy: See Copyright for the status of this software. + * + * Author: Joel W. Reed + */ + +#ifndef __XML_MODULE_H__ +#define __XML_MODULE_H__ + +#include + +#ifdef LIBXML_MODULES_ENABLED + +#ifdef __cplusplus +extern "C" { +#endif + +/** + * xmlModulePtr: + * + * A handle to a dynamically loaded module + */ +typedef struct _xmlModule xmlModule; +typedef xmlModule *xmlModulePtr; + +/** + * xmlModuleOption: + * + * enumeration of options that can be passed down to xmlModuleOpen() + */ +typedef enum { + XML_MODULE_LAZY = 1, /* lazy binding */ + XML_MODULE_LOCAL= 2 /* local binding */ +} xmlModuleOption; + +XMLPUBFUN xmlModulePtr xmlModuleOpen (const char *filename, + int options); + +XMLPUBFUN int xmlModuleSymbol (xmlModulePtr module, + const char* name, + void **result); + +XMLPUBFUN int xmlModuleClose (xmlModulePtr module); + +XMLPUBFUN int xmlModuleFree (xmlModulePtr module); + +#ifdef __cplusplus +} +#endif + +#endif /* LIBXML_MODULES_ENABLED */ + +#endif /*__XML_MODULE_H__ */ diff --git a/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/include/libxml2/libxml/xmlreader.h b/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/include/libxml2/libxml/xmlreader.h new file mode 100644 index 000000000..5d4fc5d5a --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/include/libxml2/libxml/xmlreader.h @@ -0,0 +1,436 @@ +/* + * Summary: the XMLReader implementation + * Description: API of the XML streaming API based on C# interfaces. + * + * Copy: See Copyright for the status of this software. 
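As an illustration of the allocator hooks declared in xmlmemory.h above, the sketch below routes libxml2's allocations through counting wrappers installed with xmlMemSetup(). It is not part of the vendored header; the counter is deliberately simplistic (not thread-safe, and it ignores realloc-to-zero), and all names are made up for the example.

    /* Illustrative sketch: plug custom allocators into libxml2 via xmlMemSetup(). */
    #include <stdio.h>
    #include <stdlib.h>
    #include <string.h>
    #include <libxml/parser.h>
    #include <libxml/xmlmemory.h>

    static size_t live_allocs;

    static void *counting_malloc(size_t size) { live_allocs++; return malloc(size); }

    static void *counting_realloc(void *mem, size_t size)
    {
        if (mem == NULL) live_allocs++;    /* realloc(NULL, n) behaves like malloc */
        return realloc(mem, size);
    }

    static void counting_free(void *mem)
    {
        if (mem != NULL) live_allocs--;
        free(mem);
    }

    static char *counting_strdup(const char *str)
    {
        size_t n = strlen(str) + 1;
        char *copy = counting_malloc(n);
        if (copy != NULL) memcpy(copy, str, n);
        return copy;
    }

    int main(void)
    {
        /* Allocator overrides must be installed before libxml2 allocates anything. */
        if (xmlMemSetup(counting_free, counting_malloc,
                        counting_realloc, counting_strdup) != 0)
            return 1;

        xmlInitParser();
        xmlDocPtr doc = xmlReadDoc(BAD_CAST "<r><c/></r>", NULL, NULL, 0);
        if (doc != NULL)
            xmlFreeDoc(doc);
        xmlCleanupParser();

        printf("blocks still live after cleanup: %zu\n", live_allocs);
        return 0;
    }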
+ * + * Author: Daniel Veillard + */ + +#ifndef __XML_XMLREADER_H__ +#define __XML_XMLREADER_H__ + +#include +#include +#include +#include +#ifdef LIBXML_SCHEMAS_ENABLED +#include +#include +#endif +/* for compatibility */ +#include + +#ifdef __cplusplus +extern "C" { +#endif + +/** + * xmlParserSeverities: + * + * How severe an error callback is when the per-reader error callback API + * is used. + */ +typedef enum { + XML_PARSER_SEVERITY_VALIDITY_WARNING = 1, + XML_PARSER_SEVERITY_VALIDITY_ERROR = 2, + XML_PARSER_SEVERITY_WARNING = 3, + XML_PARSER_SEVERITY_ERROR = 4 +} xmlParserSeverities; + +#ifdef LIBXML_READER_ENABLED + +/** + * xmlTextReaderMode: + * + * Internal state values for the reader. + */ +typedef enum { + XML_TEXTREADER_MODE_INITIAL = 0, + XML_TEXTREADER_MODE_INTERACTIVE = 1, + XML_TEXTREADER_MODE_ERROR = 2, + XML_TEXTREADER_MODE_EOF =3, + XML_TEXTREADER_MODE_CLOSED = 4, + XML_TEXTREADER_MODE_READING = 5 +} xmlTextReaderMode; + +/** + * xmlParserProperties: + * + * Some common options to use with xmlTextReaderSetParserProp, but it + * is better to use xmlParserOption and the xmlReaderNewxxx and + * xmlReaderForxxx APIs now. + */ +typedef enum { + XML_PARSER_LOADDTD = 1, + XML_PARSER_DEFAULTATTRS = 2, + XML_PARSER_VALIDATE = 3, + XML_PARSER_SUBST_ENTITIES = 4 +} xmlParserProperties; + +/** + * xmlReaderTypes: + * + * Predefined constants for the different types of nodes. + */ +typedef enum { + XML_READER_TYPE_NONE = 0, + XML_READER_TYPE_ELEMENT = 1, + XML_READER_TYPE_ATTRIBUTE = 2, + XML_READER_TYPE_TEXT = 3, + XML_READER_TYPE_CDATA = 4, + XML_READER_TYPE_ENTITY_REFERENCE = 5, + XML_READER_TYPE_ENTITY = 6, + XML_READER_TYPE_PROCESSING_INSTRUCTION = 7, + XML_READER_TYPE_COMMENT = 8, + XML_READER_TYPE_DOCUMENT = 9, + XML_READER_TYPE_DOCUMENT_TYPE = 10, + XML_READER_TYPE_DOCUMENT_FRAGMENT = 11, + XML_READER_TYPE_NOTATION = 12, + XML_READER_TYPE_WHITESPACE = 13, + XML_READER_TYPE_SIGNIFICANT_WHITESPACE = 14, + XML_READER_TYPE_END_ELEMENT = 15, + XML_READER_TYPE_END_ENTITY = 16, + XML_READER_TYPE_XML_DECLARATION = 17 +} xmlReaderTypes; + +/** + * xmlTextReader: + * + * Structure for an xmlReader context. + */ +typedef struct _xmlTextReader xmlTextReader; + +/** + * xmlTextReaderPtr: + * + * Pointer to an xmlReader context. 
+ */ +typedef xmlTextReader *xmlTextReaderPtr; + +/* + * Constructors & Destructor + */ +XMLPUBFUN xmlTextReaderPtr + xmlNewTextReader (xmlParserInputBufferPtr input, + const char *URI); +XMLPUBFUN xmlTextReaderPtr + xmlNewTextReaderFilename(const char *URI); + +XMLPUBFUN void + xmlFreeTextReader (xmlTextReaderPtr reader); + +XMLPUBFUN int + xmlTextReaderSetup(xmlTextReaderPtr reader, + xmlParserInputBufferPtr input, const char *URL, + const char *encoding, int options); +XMLPUBFUN void + xmlTextReaderSetMaxAmplification(xmlTextReaderPtr reader, + unsigned maxAmpl); +XMLPUBFUN const xmlError * + xmlTextReaderGetLastError(xmlTextReaderPtr reader); + +/* + * Iterators + */ +XMLPUBFUN int + xmlTextReaderRead (xmlTextReaderPtr reader); + +#ifdef LIBXML_WRITER_ENABLED +XMLPUBFUN xmlChar * + xmlTextReaderReadInnerXml(xmlTextReaderPtr reader); + +XMLPUBFUN xmlChar * + xmlTextReaderReadOuterXml(xmlTextReaderPtr reader); +#endif + +XMLPUBFUN xmlChar * + xmlTextReaderReadString (xmlTextReaderPtr reader); +XMLPUBFUN int + xmlTextReaderReadAttributeValue(xmlTextReaderPtr reader); + +/* + * Attributes of the node + */ +XMLPUBFUN int + xmlTextReaderAttributeCount(xmlTextReaderPtr reader); +XMLPUBFUN int + xmlTextReaderDepth (xmlTextReaderPtr reader); +XMLPUBFUN int + xmlTextReaderHasAttributes(xmlTextReaderPtr reader); +XMLPUBFUN int + xmlTextReaderHasValue(xmlTextReaderPtr reader); +XMLPUBFUN int + xmlTextReaderIsDefault (xmlTextReaderPtr reader); +XMLPUBFUN int + xmlTextReaderIsEmptyElement(xmlTextReaderPtr reader); +XMLPUBFUN int + xmlTextReaderNodeType (xmlTextReaderPtr reader); +XMLPUBFUN int + xmlTextReaderQuoteChar (xmlTextReaderPtr reader); +XMLPUBFUN int + xmlTextReaderReadState (xmlTextReaderPtr reader); +XMLPUBFUN int + xmlTextReaderIsNamespaceDecl(xmlTextReaderPtr reader); + +XMLPUBFUN const xmlChar * + xmlTextReaderConstBaseUri (xmlTextReaderPtr reader); +XMLPUBFUN const xmlChar * + xmlTextReaderConstLocalName (xmlTextReaderPtr reader); +XMLPUBFUN const xmlChar * + xmlTextReaderConstName (xmlTextReaderPtr reader); +XMLPUBFUN const xmlChar * + xmlTextReaderConstNamespaceUri(xmlTextReaderPtr reader); +XMLPUBFUN const xmlChar * + xmlTextReaderConstPrefix (xmlTextReaderPtr reader); +XMLPUBFUN const xmlChar * + xmlTextReaderConstXmlLang (xmlTextReaderPtr reader); +XMLPUBFUN const xmlChar * + xmlTextReaderConstString (xmlTextReaderPtr reader, + const xmlChar *str); +XMLPUBFUN const xmlChar * + xmlTextReaderConstValue (xmlTextReaderPtr reader); + +/* + * use the Const version of the routine for + * better performance and simpler code + */ +XMLPUBFUN xmlChar * + xmlTextReaderBaseUri (xmlTextReaderPtr reader); +XMLPUBFUN xmlChar * + xmlTextReaderLocalName (xmlTextReaderPtr reader); +XMLPUBFUN xmlChar * + xmlTextReaderName (xmlTextReaderPtr reader); +XMLPUBFUN xmlChar * + xmlTextReaderNamespaceUri(xmlTextReaderPtr reader); +XMLPUBFUN xmlChar * + xmlTextReaderPrefix (xmlTextReaderPtr reader); +XMLPUBFUN xmlChar * + xmlTextReaderXmlLang (xmlTextReaderPtr reader); +XMLPUBFUN xmlChar * + xmlTextReaderValue (xmlTextReaderPtr reader); + +/* + * Methods of the XmlTextReader + */ +XMLPUBFUN int + xmlTextReaderClose (xmlTextReaderPtr reader); +XMLPUBFUN xmlChar * + xmlTextReaderGetAttributeNo (xmlTextReaderPtr reader, + int no); +XMLPUBFUN xmlChar * + xmlTextReaderGetAttribute (xmlTextReaderPtr reader, + const xmlChar *name); +XMLPUBFUN xmlChar * + xmlTextReaderGetAttributeNs (xmlTextReaderPtr reader, + const xmlChar *localName, + const xmlChar *namespaceURI); +XMLPUBFUN xmlParserInputBufferPtr + 
xmlTextReaderGetRemainder (xmlTextReaderPtr reader); +XMLPUBFUN xmlChar * + xmlTextReaderLookupNamespace(xmlTextReaderPtr reader, + const xmlChar *prefix); +XMLPUBFUN int + xmlTextReaderMoveToAttributeNo(xmlTextReaderPtr reader, + int no); +XMLPUBFUN int + xmlTextReaderMoveToAttribute(xmlTextReaderPtr reader, + const xmlChar *name); +XMLPUBFUN int + xmlTextReaderMoveToAttributeNs(xmlTextReaderPtr reader, + const xmlChar *localName, + const xmlChar *namespaceURI); +XMLPUBFUN int + xmlTextReaderMoveToFirstAttribute(xmlTextReaderPtr reader); +XMLPUBFUN int + xmlTextReaderMoveToNextAttribute(xmlTextReaderPtr reader); +XMLPUBFUN int + xmlTextReaderMoveToElement (xmlTextReaderPtr reader); +XMLPUBFUN int + xmlTextReaderNormalization (xmlTextReaderPtr reader); +XMLPUBFUN const xmlChar * + xmlTextReaderConstEncoding (xmlTextReaderPtr reader); + +/* + * Extensions + */ +XMLPUBFUN int + xmlTextReaderSetParserProp (xmlTextReaderPtr reader, + int prop, + int value); +XMLPUBFUN int + xmlTextReaderGetParserProp (xmlTextReaderPtr reader, + int prop); +XMLPUBFUN xmlNodePtr + xmlTextReaderCurrentNode (xmlTextReaderPtr reader); + +XMLPUBFUN int + xmlTextReaderGetParserLineNumber(xmlTextReaderPtr reader); + +XMLPUBFUN int + xmlTextReaderGetParserColumnNumber(xmlTextReaderPtr reader); + +XMLPUBFUN xmlNodePtr + xmlTextReaderPreserve (xmlTextReaderPtr reader); +#ifdef LIBXML_PATTERN_ENABLED +XMLPUBFUN int + xmlTextReaderPreservePattern(xmlTextReaderPtr reader, + const xmlChar *pattern, + const xmlChar **namespaces); +#endif /* LIBXML_PATTERN_ENABLED */ +XMLPUBFUN xmlDocPtr + xmlTextReaderCurrentDoc (xmlTextReaderPtr reader); +XMLPUBFUN xmlNodePtr + xmlTextReaderExpand (xmlTextReaderPtr reader); +XMLPUBFUN int + xmlTextReaderNext (xmlTextReaderPtr reader); +XMLPUBFUN int + xmlTextReaderNextSibling (xmlTextReaderPtr reader); +XMLPUBFUN int + xmlTextReaderIsValid (xmlTextReaderPtr reader); +#ifdef LIBXML_SCHEMAS_ENABLED +XMLPUBFUN int + xmlTextReaderRelaxNGValidate(xmlTextReaderPtr reader, + const char *rng); +XMLPUBFUN int + xmlTextReaderRelaxNGValidateCtxt(xmlTextReaderPtr reader, + xmlRelaxNGValidCtxtPtr ctxt, + int options); + +XMLPUBFUN int + xmlTextReaderRelaxNGSetSchema(xmlTextReaderPtr reader, + xmlRelaxNGPtr schema); +XMLPUBFUN int + xmlTextReaderSchemaValidate (xmlTextReaderPtr reader, + const char *xsd); +XMLPUBFUN int + xmlTextReaderSchemaValidateCtxt(xmlTextReaderPtr reader, + xmlSchemaValidCtxtPtr ctxt, + int options); +XMLPUBFUN int + xmlTextReaderSetSchema (xmlTextReaderPtr reader, + xmlSchemaPtr schema); +#endif +XMLPUBFUN const xmlChar * + xmlTextReaderConstXmlVersion(xmlTextReaderPtr reader); +XMLPUBFUN int + xmlTextReaderStandalone (xmlTextReaderPtr reader); + + +/* + * Index lookup + */ +XMLPUBFUN long + xmlTextReaderByteConsumed (xmlTextReaderPtr reader); + +/* + * New more complete APIs for simpler creation and reuse of readers + */ +XMLPUBFUN xmlTextReaderPtr + xmlReaderWalker (xmlDocPtr doc); +XMLPUBFUN xmlTextReaderPtr + xmlReaderForDoc (const xmlChar * cur, + const char *URL, + const char *encoding, + int options); +XMLPUBFUN xmlTextReaderPtr + xmlReaderForFile (const char *filename, + const char *encoding, + int options); +XMLPUBFUN xmlTextReaderPtr + xmlReaderForMemory (const char *buffer, + int size, + const char *URL, + const char *encoding, + int options); +XMLPUBFUN xmlTextReaderPtr + xmlReaderForFd (int fd, + const char *URL, + const char *encoding, + int options); +XMLPUBFUN xmlTextReaderPtr + xmlReaderForIO (xmlInputReadCallback ioread, + xmlInputCloseCallback ioclose, + void 
*ioctx, + const char *URL, + const char *encoding, + int options); + +XMLPUBFUN int + xmlReaderNewWalker (xmlTextReaderPtr reader, + xmlDocPtr doc); +XMLPUBFUN int + xmlReaderNewDoc (xmlTextReaderPtr reader, + const xmlChar * cur, + const char *URL, + const char *encoding, + int options); +XMLPUBFUN int + xmlReaderNewFile (xmlTextReaderPtr reader, + const char *filename, + const char *encoding, + int options); +XMLPUBFUN int + xmlReaderNewMemory (xmlTextReaderPtr reader, + const char *buffer, + int size, + const char *URL, + const char *encoding, + int options); +XMLPUBFUN int + xmlReaderNewFd (xmlTextReaderPtr reader, + int fd, + const char *URL, + const char *encoding, + int options); +XMLPUBFUN int + xmlReaderNewIO (xmlTextReaderPtr reader, + xmlInputReadCallback ioread, + xmlInputCloseCallback ioclose, + void *ioctx, + const char *URL, + const char *encoding, + int options); +/* + * Error handling extensions + */ +typedef void * xmlTextReaderLocatorPtr; + +/** + * xmlTextReaderErrorFunc: + * @arg: the user argument + * @msg: the message + * @severity: the severity of the error + * @locator: a locator indicating where the error occurred + * + * Signature of an error callback from a reader parser + */ +typedef void (*xmlTextReaderErrorFunc)(void *arg, + const char *msg, + xmlParserSeverities severity, + xmlTextReaderLocatorPtr locator); +XMLPUBFUN int + xmlTextReaderLocatorLineNumber(xmlTextReaderLocatorPtr locator); +XMLPUBFUN xmlChar * + xmlTextReaderLocatorBaseURI (xmlTextReaderLocatorPtr locator); +XMLPUBFUN void + xmlTextReaderSetErrorHandler(xmlTextReaderPtr reader, + xmlTextReaderErrorFunc f, + void *arg); +XMLPUBFUN void + xmlTextReaderSetStructuredErrorHandler(xmlTextReaderPtr reader, + xmlStructuredErrorFunc f, + void *arg); +XMLPUBFUN void + xmlTextReaderGetErrorHandler(xmlTextReaderPtr reader, + xmlTextReaderErrorFunc *f, + void **arg); + +#endif /* LIBXML_READER_ENABLED */ + +#ifdef __cplusplus +} +#endif + +#endif /* __XML_XMLREADER_H__ */ + diff --git a/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/include/libxml2/libxml/xmlregexp.h b/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/include/libxml2/libxml/xmlregexp.h new file mode 100644 index 000000000..2d66437a5 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/include/libxml2/libxml/xmlregexp.h @@ -0,0 +1,215 @@ +/* + * Summary: regular expressions handling + * Description: basic API for libxml regular expressions handling used + * for XML Schemas and validation. + * + * Copy: See Copyright for the status of this software. + * + * Author: Daniel Veillard + */ + +#ifndef __XML_REGEXP_H__ +#define __XML_REGEXP_H__ + +#include +#include +#include + +#ifdef LIBXML_REGEXP_ENABLED + +#ifdef __cplusplus +extern "C" { +#endif + +/** + * xmlRegexpPtr: + * + * A libxml regular expression, they can actually be far more complex + * thank the POSIX regex expressions. 
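The xmlReader constructors and iterators declared above (xmlReaderForFile, xmlTextReaderRead, xmlTextReaderNodeType, and friends) are normally driven by a simple pull loop. A minimal sketch, not part of the vendored header, assuming an XML file path is passed on the command line and keeping error handling deliberately thin:

    /* Illustrative sketch: the typical xmlReader pull-parsing loop. */
    #include <stdio.h>
    #include <libxml/xmlreader.h>

    int main(int argc, char **argv)
    {
        if (argc < 2) {
            fprintf(stderr, "usage: %s file.xml\n", argv[0]);
            return 1;
        }

        xmlTextReaderPtr reader = xmlReaderForFile(argv[1], NULL, 0);
        if (reader == NULL)
            return 1;

        int ret;
        /* xmlTextReaderRead(): 1 = node available, 0 = end of document, -1 = error */
        while ((ret = xmlTextReaderRead(reader)) == 1) {
            if (xmlTextReaderNodeType(reader) == XML_READER_TYPE_ELEMENT)
                printf("element %s at depth %d\n",
                       (const char *) xmlTextReaderConstName(reader),
                       xmlTextReaderDepth(reader));
        }

        xmlFreeTextReader(reader);
        return (ret == 0) ? 0 : 1;
    }

A reader built this way only ever holds the current node, which is the usual reason to prefer the streaming API over building a full tree with xmlReadFile() for large inputs.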
+ */ +typedef struct _xmlRegexp xmlRegexp; +typedef xmlRegexp *xmlRegexpPtr; + +/** + * xmlRegExecCtxtPtr: + * + * A libxml progressive regular expression evaluation context + */ +typedef struct _xmlRegExecCtxt xmlRegExecCtxt; +typedef xmlRegExecCtxt *xmlRegExecCtxtPtr; + +/* + * The POSIX like API + */ +XMLPUBFUN xmlRegexpPtr + xmlRegexpCompile (const xmlChar *regexp); +XMLPUBFUN void xmlRegFreeRegexp(xmlRegexpPtr regexp); +XMLPUBFUN int + xmlRegexpExec (xmlRegexpPtr comp, + const xmlChar *value); +XMLPUBFUN void + xmlRegexpPrint (FILE *output, + xmlRegexpPtr regexp); +XMLPUBFUN int + xmlRegexpIsDeterminist(xmlRegexpPtr comp); + +/** + * xmlRegExecCallbacks: + * @exec: the regular expression context + * @token: the current token string + * @transdata: transition data + * @inputdata: input data + * + * Callback function when doing a transition in the automata + */ +typedef void (*xmlRegExecCallbacks) (xmlRegExecCtxtPtr exec, + const xmlChar *token, + void *transdata, + void *inputdata); + +/* + * The progressive API + */ +XMLPUBFUN xmlRegExecCtxtPtr + xmlRegNewExecCtxt (xmlRegexpPtr comp, + xmlRegExecCallbacks callback, + void *data); +XMLPUBFUN void + xmlRegFreeExecCtxt (xmlRegExecCtxtPtr exec); +XMLPUBFUN int + xmlRegExecPushString(xmlRegExecCtxtPtr exec, + const xmlChar *value, + void *data); +XMLPUBFUN int + xmlRegExecPushString2(xmlRegExecCtxtPtr exec, + const xmlChar *value, + const xmlChar *value2, + void *data); + +XMLPUBFUN int + xmlRegExecNextValues(xmlRegExecCtxtPtr exec, + int *nbval, + int *nbneg, + xmlChar **values, + int *terminal); +XMLPUBFUN int + xmlRegExecErrInfo (xmlRegExecCtxtPtr exec, + const xmlChar **string, + int *nbval, + int *nbneg, + xmlChar **values, + int *terminal); +#ifdef LIBXML_EXPR_ENABLED +/* + * Formal regular expression handling + * Its goal is to do some formal work on content models + */ + +/* expressions are used within a context */ +typedef struct _xmlExpCtxt xmlExpCtxt; +typedef xmlExpCtxt *xmlExpCtxtPtr; + +XMLPUBFUN void + xmlExpFreeCtxt (xmlExpCtxtPtr ctxt); +XMLPUBFUN xmlExpCtxtPtr + xmlExpNewCtxt (int maxNodes, + xmlDictPtr dict); + +XMLPUBFUN int + xmlExpCtxtNbNodes(xmlExpCtxtPtr ctxt); +XMLPUBFUN int + xmlExpCtxtNbCons(xmlExpCtxtPtr ctxt); + +/* Expressions are trees but the tree is opaque */ +typedef struct _xmlExpNode xmlExpNode; +typedef xmlExpNode *xmlExpNodePtr; + +typedef enum { + XML_EXP_EMPTY = 0, + XML_EXP_FORBID = 1, + XML_EXP_ATOM = 2, + XML_EXP_SEQ = 3, + XML_EXP_OR = 4, + XML_EXP_COUNT = 5 +} xmlExpNodeType; + +/* + * 2 core expressions shared by all for the empty language set + * and for the set with just the empty token + */ +XMLPUBVAR xmlExpNodePtr forbiddenExp; +XMLPUBVAR xmlExpNodePtr emptyExp; + +/* + * Expressions are reference counted internally + */ +XMLPUBFUN void + xmlExpFree (xmlExpCtxtPtr ctxt, + xmlExpNodePtr expr); +XMLPUBFUN void + xmlExpRef (xmlExpNodePtr expr); + +/* + * constructors can be either manual or from a string + */ +XMLPUBFUN xmlExpNodePtr + xmlExpParse (xmlExpCtxtPtr ctxt, + const char *expr); +XMLPUBFUN xmlExpNodePtr + xmlExpNewAtom (xmlExpCtxtPtr ctxt, + const xmlChar *name, + int len); +XMLPUBFUN xmlExpNodePtr + xmlExpNewOr (xmlExpCtxtPtr ctxt, + xmlExpNodePtr left, + xmlExpNodePtr right); +XMLPUBFUN xmlExpNodePtr + xmlExpNewSeq (xmlExpCtxtPtr ctxt, + xmlExpNodePtr left, + xmlExpNodePtr right); +XMLPUBFUN xmlExpNodePtr + xmlExpNewRange (xmlExpCtxtPtr ctxt, + xmlExpNodePtr subset, + int min, + int max); +/* + * The really interesting APIs + */ +XMLPUBFUN int + xmlExpIsNillable(xmlExpNodePtr 
expr); +XMLPUBFUN int + xmlExpMaxToken (xmlExpNodePtr expr); +XMLPUBFUN int + xmlExpGetLanguage(xmlExpCtxtPtr ctxt, + xmlExpNodePtr expr, + const xmlChar**langList, + int len); +XMLPUBFUN int + xmlExpGetStart (xmlExpCtxtPtr ctxt, + xmlExpNodePtr expr, + const xmlChar**tokList, + int len); +XMLPUBFUN xmlExpNodePtr + xmlExpStringDerive(xmlExpCtxtPtr ctxt, + xmlExpNodePtr expr, + const xmlChar *str, + int len); +XMLPUBFUN xmlExpNodePtr + xmlExpExpDerive (xmlExpCtxtPtr ctxt, + xmlExpNodePtr expr, + xmlExpNodePtr sub); +XMLPUBFUN int + xmlExpSubsume (xmlExpCtxtPtr ctxt, + xmlExpNodePtr expr, + xmlExpNodePtr sub); +XMLPUBFUN void + xmlExpDump (xmlBufferPtr buf, + xmlExpNodePtr expr); +#endif /* LIBXML_EXPR_ENABLED */ +#ifdef __cplusplus +} +#endif + +#endif /* LIBXML_REGEXP_ENABLED */ + +#endif /*__XML_REGEXP_H__ */ diff --git a/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/include/libxml2/libxml/xmlsave.h b/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/include/libxml2/libxml/xmlsave.h new file mode 100644 index 000000000..e266e467c --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/include/libxml2/libxml/xmlsave.h @@ -0,0 +1,102 @@ +/* + * Summary: the XML document serializer + * Description: API to save document or subtree of document + * + * Copy: See Copyright for the status of this software. + * + * Author: Daniel Veillard + */ + +#ifndef __XML_XMLSAVE_H__ +#define __XML_XMLSAVE_H__ + +#include +#include +#include +#include + +#ifdef LIBXML_OUTPUT_ENABLED +#ifdef __cplusplus +extern "C" { +#endif + +/** + * xmlSaveOption: + * + * This is the set of XML save options that can be passed down + * to the xmlSaveToFd() and similar calls. + */ +typedef enum { + XML_SAVE_FORMAT = 1<<0, /* format save output */ + XML_SAVE_NO_DECL = 1<<1, /* drop the xml declaration */ + XML_SAVE_NO_EMPTY = 1<<2, /* no empty tags */ + XML_SAVE_NO_XHTML = 1<<3, /* disable XHTML1 specific rules */ + XML_SAVE_XHTML = 1<<4, /* force XHTML1 specific rules */ + XML_SAVE_AS_XML = 1<<5, /* force XML serialization on HTML doc */ + XML_SAVE_AS_HTML = 1<<6, /* force HTML serialization on XML doc */ + XML_SAVE_WSNONSIG = 1<<7 /* format with non-significant whitespace */ +} xmlSaveOption; + + +typedef struct _xmlSaveCtxt xmlSaveCtxt; +typedef xmlSaveCtxt *xmlSaveCtxtPtr; + +XMLPUBFUN xmlSaveCtxtPtr + xmlSaveToFd (int fd, + const char *encoding, + int options); +XMLPUBFUN xmlSaveCtxtPtr + xmlSaveToFilename (const char *filename, + const char *encoding, + int options); + +XMLPUBFUN xmlSaveCtxtPtr + xmlSaveToBuffer (xmlBufferPtr buffer, + const char *encoding, + int options); + +XMLPUBFUN xmlSaveCtxtPtr + xmlSaveToIO (xmlOutputWriteCallback iowrite, + xmlOutputCloseCallback ioclose, + void *ioctx, + const char *encoding, + int options); + +XMLPUBFUN long + xmlSaveDoc (xmlSaveCtxtPtr ctxt, + xmlDocPtr doc); +XMLPUBFUN long + xmlSaveTree (xmlSaveCtxtPtr ctxt, + xmlNodePtr node); + +XMLPUBFUN int + xmlSaveFlush (xmlSaveCtxtPtr ctxt); +XMLPUBFUN int + xmlSaveClose (xmlSaveCtxtPtr ctxt); +XMLPUBFUN int + xmlSaveFinish (xmlSaveCtxtPtr ctxt); +XMLPUBFUN int + xmlSaveSetEscape (xmlSaveCtxtPtr ctxt, + xmlCharEncodingOutputFunc escape); +XMLPUBFUN int + xmlSaveSetAttrEscape (xmlSaveCtxtPtr ctxt, + xmlCharEncodingOutputFunc escape); + +XML_DEPRECATED +XMLPUBFUN int + xmlThrDefIndentTreeOutput(int v); +XML_DEPRECATED +XMLPUBFUN const char * + xmlThrDefTreeIndentString(const char * v); +XML_DEPRECATED +XMLPUBFUN int + 
xmlThrDefSaveNoEmptyTags(int v); + +#ifdef __cplusplus +} +#endif + +#endif /* LIBXML_OUTPUT_ENABLED */ +#endif /* __XML_XMLSAVE_H__ */ + + diff --git a/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/include/libxml2/libxml/xmlschemas.h b/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/include/libxml2/libxml/xmlschemas.h new file mode 100644 index 000000000..c2af3d709 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/include/libxml2/libxml/xmlschemas.h @@ -0,0 +1,249 @@ +/* + * Summary: incomplete XML Schemas structure implementation + * Description: interface to the XML Schemas handling and schema validity + * checking, it is incomplete right now. + * + * Copy: See Copyright for the status of this software. + * + * Author: Daniel Veillard + */ + + +#ifndef __XML_SCHEMA_H__ +#define __XML_SCHEMA_H__ + +#include + +#ifdef LIBXML_SCHEMAS_ENABLED + +#include +#include +#include +#include + +#ifdef __cplusplus +extern "C" { +#endif + +/** + * This error codes are obsolete; not used any more. + */ +typedef enum { + XML_SCHEMAS_ERR_OK = 0, + XML_SCHEMAS_ERR_NOROOT = 1, + XML_SCHEMAS_ERR_UNDECLAREDELEM, + XML_SCHEMAS_ERR_NOTTOPLEVEL, + XML_SCHEMAS_ERR_MISSING, + XML_SCHEMAS_ERR_WRONGELEM, + XML_SCHEMAS_ERR_NOTYPE, + XML_SCHEMAS_ERR_NOROLLBACK, + XML_SCHEMAS_ERR_ISABSTRACT, + XML_SCHEMAS_ERR_NOTEMPTY, + XML_SCHEMAS_ERR_ELEMCONT, + XML_SCHEMAS_ERR_HAVEDEFAULT, + XML_SCHEMAS_ERR_NOTNILLABLE, + XML_SCHEMAS_ERR_EXTRACONTENT, + XML_SCHEMAS_ERR_INVALIDATTR, + XML_SCHEMAS_ERR_INVALIDELEM, + XML_SCHEMAS_ERR_NOTDETERMINIST, + XML_SCHEMAS_ERR_CONSTRUCT, + XML_SCHEMAS_ERR_INTERNAL, + XML_SCHEMAS_ERR_NOTSIMPLE, + XML_SCHEMAS_ERR_ATTRUNKNOWN, + XML_SCHEMAS_ERR_ATTRINVALID, + XML_SCHEMAS_ERR_VALUE, + XML_SCHEMAS_ERR_FACET, + XML_SCHEMAS_ERR_, + XML_SCHEMAS_ERR_XXX +} xmlSchemaValidError; + +/* +* ATTENTION: Change xmlSchemaSetValidOptions's check +* for invalid values, if adding to the validation +* options below. +*/ +/** + * xmlSchemaValidOption: + * + * This is the set of XML Schema validation options. + */ +typedef enum { + XML_SCHEMA_VAL_VC_I_CREATE = 1<<0 + /* Default/fixed: create an attribute node + * or an element's text node on the instance. + */ +} xmlSchemaValidOption; + +/* + XML_SCHEMA_VAL_XSI_ASSEMBLE = 1<<1, + * assemble schemata using + * xsi:schemaLocation and + * xsi:noNamespaceSchemaLocation +*/ + +/** + * The schemas related types are kept internal + */ +typedef struct _xmlSchema xmlSchema; +typedef xmlSchema *xmlSchemaPtr; + +/** + * xmlSchemaValidityErrorFunc: + * @ctx: the validation context + * @msg: the message + * @...: extra arguments + * + * Signature of an error callback from an XSD validation + */ +typedef void (*xmlSchemaValidityErrorFunc) + (void *ctx, const char *msg, ...) LIBXML_ATTR_FORMAT(2,3); + +/** + * xmlSchemaValidityWarningFunc: + * @ctx: the validation context + * @msg: the message + * @...: extra arguments + * + * Signature of a warning callback from an XSD validation + */ +typedef void (*xmlSchemaValidityWarningFunc) + (void *ctx, const char *msg, ...) 
LIBXML_ATTR_FORMAT(2,3); + +/** + * A schemas validation context + */ +typedef struct _xmlSchemaParserCtxt xmlSchemaParserCtxt; +typedef xmlSchemaParserCtxt *xmlSchemaParserCtxtPtr; + +typedef struct _xmlSchemaValidCtxt xmlSchemaValidCtxt; +typedef xmlSchemaValidCtxt *xmlSchemaValidCtxtPtr; + +/** + * xmlSchemaValidityLocatorFunc: + * @ctx: user provided context + * @file: returned file information + * @line: returned line information + * + * A schemas validation locator, a callback called by the validator. + * This is used when file or node information are not available + * to find out what file and line number are affected + * + * Returns: 0 in case of success and -1 in case of error + */ + +typedef int (*xmlSchemaValidityLocatorFunc) (void *ctx, + const char **file, unsigned long *line); + +/* + * Interfaces for parsing. + */ +XMLPUBFUN xmlSchemaParserCtxtPtr + xmlSchemaNewParserCtxt (const char *URL); +XMLPUBFUN xmlSchemaParserCtxtPtr + xmlSchemaNewMemParserCtxt (const char *buffer, + int size); +XMLPUBFUN xmlSchemaParserCtxtPtr + xmlSchemaNewDocParserCtxt (xmlDocPtr doc); +XMLPUBFUN void + xmlSchemaFreeParserCtxt (xmlSchemaParserCtxtPtr ctxt); +XMLPUBFUN void + xmlSchemaSetParserErrors (xmlSchemaParserCtxtPtr ctxt, + xmlSchemaValidityErrorFunc err, + xmlSchemaValidityWarningFunc warn, + void *ctx); +XMLPUBFUN void + xmlSchemaSetParserStructuredErrors(xmlSchemaParserCtxtPtr ctxt, + xmlStructuredErrorFunc serror, + void *ctx); +XMLPUBFUN int + xmlSchemaGetParserErrors(xmlSchemaParserCtxtPtr ctxt, + xmlSchemaValidityErrorFunc * err, + xmlSchemaValidityWarningFunc * warn, + void **ctx); +XMLPUBFUN int + xmlSchemaIsValid (xmlSchemaValidCtxtPtr ctxt); + +XMLPUBFUN xmlSchemaPtr + xmlSchemaParse (xmlSchemaParserCtxtPtr ctxt); +XMLPUBFUN void + xmlSchemaFree (xmlSchemaPtr schema); +#ifdef LIBXML_OUTPUT_ENABLED +XMLPUBFUN void + xmlSchemaDump (FILE *output, + xmlSchemaPtr schema); +#endif /* LIBXML_OUTPUT_ENABLED */ +/* + * Interfaces for validating + */ +XMLPUBFUN void + xmlSchemaSetValidErrors (xmlSchemaValidCtxtPtr ctxt, + xmlSchemaValidityErrorFunc err, + xmlSchemaValidityWarningFunc warn, + void *ctx); +XMLPUBFUN void + xmlSchemaSetValidStructuredErrors(xmlSchemaValidCtxtPtr ctxt, + xmlStructuredErrorFunc serror, + void *ctx); +XMLPUBFUN int + xmlSchemaGetValidErrors (xmlSchemaValidCtxtPtr ctxt, + xmlSchemaValidityErrorFunc *err, + xmlSchemaValidityWarningFunc *warn, + void **ctx); +XMLPUBFUN int + xmlSchemaSetValidOptions (xmlSchemaValidCtxtPtr ctxt, + int options); +XMLPUBFUN void + xmlSchemaValidateSetFilename(xmlSchemaValidCtxtPtr vctxt, + const char *filename); +XMLPUBFUN int + xmlSchemaValidCtxtGetOptions(xmlSchemaValidCtxtPtr ctxt); + +XMLPUBFUN xmlSchemaValidCtxtPtr + xmlSchemaNewValidCtxt (xmlSchemaPtr schema); +XMLPUBFUN void + xmlSchemaFreeValidCtxt (xmlSchemaValidCtxtPtr ctxt); +XMLPUBFUN int + xmlSchemaValidateDoc (xmlSchemaValidCtxtPtr ctxt, + xmlDocPtr instance); +XMLPUBFUN int + xmlSchemaValidateOneElement (xmlSchemaValidCtxtPtr ctxt, + xmlNodePtr elem); +XMLPUBFUN int + xmlSchemaValidateStream (xmlSchemaValidCtxtPtr ctxt, + xmlParserInputBufferPtr input, + xmlCharEncoding enc, + xmlSAXHandlerPtr sax, + void *user_data); +XMLPUBFUN int + xmlSchemaValidateFile (xmlSchemaValidCtxtPtr ctxt, + const char * filename, + int options); + +XMLPUBFUN xmlParserCtxtPtr + xmlSchemaValidCtxtGetParserCtxt(xmlSchemaValidCtxtPtr ctxt); + +/* + * Interface to insert Schemas SAX validation in a SAX stream + */ +typedef struct _xmlSchemaSAXPlug xmlSchemaSAXPlugStruct; +typedef 
xmlSchemaSAXPlugStruct *xmlSchemaSAXPlugPtr; + +XMLPUBFUN xmlSchemaSAXPlugPtr + xmlSchemaSAXPlug (xmlSchemaValidCtxtPtr ctxt, + xmlSAXHandlerPtr *sax, + void **user_data); +XMLPUBFUN int + xmlSchemaSAXUnplug (xmlSchemaSAXPlugPtr plug); + + +XMLPUBFUN void + xmlSchemaValidateSetLocator (xmlSchemaValidCtxtPtr vctxt, + xmlSchemaValidityLocatorFunc f, + void *ctxt); + +#ifdef __cplusplus +} +#endif + +#endif /* LIBXML_SCHEMAS_ENABLED */ +#endif /* __XML_SCHEMA_H__ */ diff --git a/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/include/libxml2/libxml/xmlschemastypes.h b/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/include/libxml2/libxml/xmlschemastypes.h new file mode 100644 index 000000000..e2cde3570 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/include/libxml2/libxml/xmlschemastypes.h @@ -0,0 +1,152 @@ +/* + * Summary: implementation of XML Schema Datatypes + * Description: module providing the XML Schema Datatypes implementation + * both definition and validity checking + * + * Copy: See Copyright for the status of this software. + * + * Author: Daniel Veillard + */ + + +#ifndef __XML_SCHEMA_TYPES_H__ +#define __XML_SCHEMA_TYPES_H__ + +#include + +#ifdef LIBXML_SCHEMAS_ENABLED + +#include +#include + +#ifdef __cplusplus +extern "C" { +#endif + +typedef enum { + XML_SCHEMA_WHITESPACE_UNKNOWN = 0, + XML_SCHEMA_WHITESPACE_PRESERVE = 1, + XML_SCHEMA_WHITESPACE_REPLACE = 2, + XML_SCHEMA_WHITESPACE_COLLAPSE = 3 +} xmlSchemaWhitespaceValueType; + +XMLPUBFUN int + xmlSchemaInitTypes (void); +XML_DEPRECATED +XMLPUBFUN void + xmlSchemaCleanupTypes (void); +XMLPUBFUN xmlSchemaTypePtr + xmlSchemaGetPredefinedType (const xmlChar *name, + const xmlChar *ns); +XMLPUBFUN int + xmlSchemaValidatePredefinedType (xmlSchemaTypePtr type, + const xmlChar *value, + xmlSchemaValPtr *val); +XMLPUBFUN int + xmlSchemaValPredefTypeNode (xmlSchemaTypePtr type, + const xmlChar *value, + xmlSchemaValPtr *val, + xmlNodePtr node); +XMLPUBFUN int + xmlSchemaValidateFacet (xmlSchemaTypePtr base, + xmlSchemaFacetPtr facet, + const xmlChar *value, + xmlSchemaValPtr val); +XMLPUBFUN int + xmlSchemaValidateFacetWhtsp (xmlSchemaFacetPtr facet, + xmlSchemaWhitespaceValueType fws, + xmlSchemaValType valType, + const xmlChar *value, + xmlSchemaValPtr val, + xmlSchemaWhitespaceValueType ws); +XMLPUBFUN void + xmlSchemaFreeValue (xmlSchemaValPtr val); +XMLPUBFUN xmlSchemaFacetPtr + xmlSchemaNewFacet (void); +XMLPUBFUN int + xmlSchemaCheckFacet (xmlSchemaFacetPtr facet, + xmlSchemaTypePtr typeDecl, + xmlSchemaParserCtxtPtr ctxt, + const xmlChar *name); +XMLPUBFUN void + xmlSchemaFreeFacet (xmlSchemaFacetPtr facet); +XMLPUBFUN int + xmlSchemaCompareValues (xmlSchemaValPtr x, + xmlSchemaValPtr y); +XMLPUBFUN xmlSchemaTypePtr + xmlSchemaGetBuiltInListSimpleTypeItemType (xmlSchemaTypePtr type); +XMLPUBFUN int + xmlSchemaValidateListSimpleTypeFacet (xmlSchemaFacetPtr facet, + const xmlChar *value, + unsigned long actualLen, + unsigned long *expectedLen); +XMLPUBFUN xmlSchemaTypePtr + xmlSchemaGetBuiltInType (xmlSchemaValType type); +XMLPUBFUN int + xmlSchemaIsBuiltInTypeFacet (xmlSchemaTypePtr type, + int facetType); +XMLPUBFUN xmlChar * + xmlSchemaCollapseString (const xmlChar *value); +XMLPUBFUN xmlChar * + xmlSchemaWhiteSpaceReplace (const xmlChar *value); +XMLPUBFUN unsigned long + xmlSchemaGetFacetValueAsULong (xmlSchemaFacetPtr facet); +XMLPUBFUN int + xmlSchemaValidateLengthFacet (xmlSchemaTypePtr type, + xmlSchemaFacetPtr 
facet, + const xmlChar *value, + xmlSchemaValPtr val, + unsigned long *length); +XMLPUBFUN int + xmlSchemaValidateLengthFacetWhtsp(xmlSchemaFacetPtr facet, + xmlSchemaValType valType, + const xmlChar *value, + xmlSchemaValPtr val, + unsigned long *length, + xmlSchemaWhitespaceValueType ws); +XMLPUBFUN int + xmlSchemaValPredefTypeNodeNoNorm(xmlSchemaTypePtr type, + const xmlChar *value, + xmlSchemaValPtr *val, + xmlNodePtr node); +XMLPUBFUN int + xmlSchemaGetCanonValue (xmlSchemaValPtr val, + const xmlChar **retValue); +XMLPUBFUN int + xmlSchemaGetCanonValueWhtsp (xmlSchemaValPtr val, + const xmlChar **retValue, + xmlSchemaWhitespaceValueType ws); +XMLPUBFUN int + xmlSchemaValueAppend (xmlSchemaValPtr prev, + xmlSchemaValPtr cur); +XMLPUBFUN xmlSchemaValPtr + xmlSchemaValueGetNext (xmlSchemaValPtr cur); +XMLPUBFUN const xmlChar * + xmlSchemaValueGetAsString (xmlSchemaValPtr val); +XMLPUBFUN int + xmlSchemaValueGetAsBoolean (xmlSchemaValPtr val); +XMLPUBFUN xmlSchemaValPtr + xmlSchemaNewStringValue (xmlSchemaValType type, + const xmlChar *value); +XMLPUBFUN xmlSchemaValPtr + xmlSchemaNewNOTATIONValue (const xmlChar *name, + const xmlChar *ns); +XMLPUBFUN xmlSchemaValPtr + xmlSchemaNewQNameValue (const xmlChar *namespaceName, + const xmlChar *localName); +XMLPUBFUN int + xmlSchemaCompareValuesWhtsp (xmlSchemaValPtr x, + xmlSchemaWhitespaceValueType xws, + xmlSchemaValPtr y, + xmlSchemaWhitespaceValueType yws); +XMLPUBFUN xmlSchemaValPtr + xmlSchemaCopyValue (xmlSchemaValPtr val); +XMLPUBFUN xmlSchemaValType + xmlSchemaGetValType (xmlSchemaValPtr val); + +#ifdef __cplusplus +} +#endif + +#endif /* LIBXML_SCHEMAS_ENABLED */ +#endif /* __XML_SCHEMA_TYPES_H__ */ diff --git a/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/include/libxml2/libxml/xmlstring.h b/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/include/libxml2/libxml/xmlstring.h new file mode 100644 index 000000000..db11a0b0e --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/include/libxml2/libxml/xmlstring.h @@ -0,0 +1,140 @@ +/* + * Summary: set of routines to process strings + * Description: type and interfaces needed for the internal string handling + * of the library, especially UTF8 processing. + * + * Copy: See Copyright for the status of this software. + * + * Author: Daniel Veillard + */ + +#ifndef __XML_STRING_H__ +#define __XML_STRING_H__ + +#include +#include + +#ifdef __cplusplus +extern "C" { +#endif + +/** + * xmlChar: + * + * This is a basic byte in an UTF-8 encoded string. + * It's unsigned allowing to pinpoint case where char * are assigned + * to xmlChar * (possibly making serialization back impossible). + */ +typedef unsigned char xmlChar; + +/** + * BAD_CAST: + * + * Macro to cast a string to an xmlChar * when one know its safe. 
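Tying together the xmlschemas.h entry points declared further above (xmlSchemaNewMemParserCtxt, xmlSchemaParse, xmlSchemaNewValidCtxt, xmlSchemaValidateDoc), here is a hedged sketch of compiling an inline XSD and validating one document against it. It is not part of the vendored headers, and the schema and instance strings are invented for the example:

    /* Illustrative sketch: compile an XSD from memory and validate a document. */
    #include <stdio.h>
    #include <string.h>
    #include <libxml/parser.h>
    #include <libxml/xmlschemas.h>

    int main(void)
    {
        const char *xsd =
            "<xs:schema xmlns:xs='http://www.w3.org/2001/XMLSchema'>"
            "  <xs:element name='note' type='xs:string'/>"
            "</xs:schema>";
        const char *xml = "<note>hello</note>";
        int status = 1;

        xmlSchemaParserCtxtPtr pctxt = xmlSchemaNewMemParserCtxt(xsd, (int)strlen(xsd));
        xmlSchemaPtr schema = pctxt ? xmlSchemaParse(pctxt) : NULL;
        xmlSchemaFreeParserCtxt(pctxt);

        xmlDocPtr doc = xmlReadMemory(xml, (int)strlen(xml), "note.xml", NULL, 0);
        if (schema != NULL && doc != NULL) {
            xmlSchemaValidCtxtPtr vctxt = xmlSchemaNewValidCtxt(schema);
            /* 0 means valid, > 0 means invalid, -1 means internal error */
            int ret = xmlSchemaValidateDoc(vctxt, doc);
            printf("validation result: %d\n", ret);
            status = (ret == 0) ? 0 : 1;
            xmlSchemaFreeValidCtxt(vctxt);
        }

        if (doc) xmlFreeDoc(doc);
        if (schema) xmlSchemaFree(schema);
        return status;
    }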
+ */ +#define BAD_CAST (xmlChar *) + +/* + * xmlChar handling + */ +XMLPUBFUN xmlChar * + xmlStrdup (const xmlChar *cur); +XMLPUBFUN xmlChar * + xmlStrndup (const xmlChar *cur, + int len); +XMLPUBFUN xmlChar * + xmlCharStrndup (const char *cur, + int len); +XMLPUBFUN xmlChar * + xmlCharStrdup (const char *cur); +XMLPUBFUN xmlChar * + xmlStrsub (const xmlChar *str, + int start, + int len); +XMLPUBFUN const xmlChar * + xmlStrchr (const xmlChar *str, + xmlChar val); +XMLPUBFUN const xmlChar * + xmlStrstr (const xmlChar *str, + const xmlChar *val); +XMLPUBFUN const xmlChar * + xmlStrcasestr (const xmlChar *str, + const xmlChar *val); +XMLPUBFUN int + xmlStrcmp (const xmlChar *str1, + const xmlChar *str2); +XMLPUBFUN int + xmlStrncmp (const xmlChar *str1, + const xmlChar *str2, + int len); +XMLPUBFUN int + xmlStrcasecmp (const xmlChar *str1, + const xmlChar *str2); +XMLPUBFUN int + xmlStrncasecmp (const xmlChar *str1, + const xmlChar *str2, + int len); +XMLPUBFUN int + xmlStrEqual (const xmlChar *str1, + const xmlChar *str2); +XMLPUBFUN int + xmlStrQEqual (const xmlChar *pref, + const xmlChar *name, + const xmlChar *str); +XMLPUBFUN int + xmlStrlen (const xmlChar *str); +XMLPUBFUN xmlChar * + xmlStrcat (xmlChar *cur, + const xmlChar *add); +XMLPUBFUN xmlChar * + xmlStrncat (xmlChar *cur, + const xmlChar *add, + int len); +XMLPUBFUN xmlChar * + xmlStrncatNew (const xmlChar *str1, + const xmlChar *str2, + int len); +XMLPUBFUN int + xmlStrPrintf (xmlChar *buf, + int len, + const char *msg, + ...) LIBXML_ATTR_FORMAT(3,4); +XMLPUBFUN int + xmlStrVPrintf (xmlChar *buf, + int len, + const char *msg, + va_list ap) LIBXML_ATTR_FORMAT(3,0); + +XMLPUBFUN int + xmlGetUTF8Char (const unsigned char *utf, + int *len); +XMLPUBFUN int + xmlCheckUTF8 (const unsigned char *utf); +XMLPUBFUN int + xmlUTF8Strsize (const xmlChar *utf, + int len); +XMLPUBFUN xmlChar * + xmlUTF8Strndup (const xmlChar *utf, + int len); +XMLPUBFUN const xmlChar * + xmlUTF8Strpos (const xmlChar *utf, + int pos); +XMLPUBFUN int + xmlUTF8Strloc (const xmlChar *utf, + const xmlChar *utfchar); +XMLPUBFUN xmlChar * + xmlUTF8Strsub (const xmlChar *utf, + int start, + int len); +XMLPUBFUN int + xmlUTF8Strlen (const xmlChar *utf); +XMLPUBFUN int + xmlUTF8Size (const xmlChar *utf); +XMLPUBFUN int + xmlUTF8Charcmp (const xmlChar *utf1, + const xmlChar *utf2); + +#ifdef __cplusplus +} +#endif +#endif /* __XML_STRING_H__ */ diff --git a/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/include/libxml2/libxml/xmlunicode.h b/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/include/libxml2/libxml/xmlunicode.h new file mode 100644 index 000000000..b6d795b26 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/include/libxml2/libxml/xmlunicode.h @@ -0,0 +1,366 @@ +/* + * Summary: Unicode character APIs + * Description: API for the Unicode character APIs + * + * This file is automatically generated from the + * UCS description files of the Unicode Character Database + * http://www.unicode.org/Public/4.0-Update1/UCD-4.0.1.html + * using the genUnicode.py Python script. 
+ * + * Generation date: Tue Apr 30 17:30:38 2024 + * Sources: Blocks-4.0.1.txt UnicodeData-4.0.1.txt + * Author: Daniel Veillard + */ + +#ifndef __XML_UNICODE_H__ +#define __XML_UNICODE_H__ + +#include + +#ifdef LIBXML_UNICODE_ENABLED + +#ifdef __cplusplus +extern "C" { +#endif + +XML_DEPRECATED +XMLPUBFUN int xmlUCSIsAegeanNumbers (int code); +XML_DEPRECATED +XMLPUBFUN int xmlUCSIsAlphabeticPresentationForms (int code); +XML_DEPRECATED +XMLPUBFUN int xmlUCSIsArabic (int code); +XML_DEPRECATED +XMLPUBFUN int xmlUCSIsArabicPresentationFormsA (int code); +XML_DEPRECATED +XMLPUBFUN int xmlUCSIsArabicPresentationFormsB (int code); +XML_DEPRECATED +XMLPUBFUN int xmlUCSIsArmenian (int code); +XML_DEPRECATED +XMLPUBFUN int xmlUCSIsArrows (int code); +XML_DEPRECATED +XMLPUBFUN int xmlUCSIsBasicLatin (int code); +XML_DEPRECATED +XMLPUBFUN int xmlUCSIsBengali (int code); +XML_DEPRECATED +XMLPUBFUN int xmlUCSIsBlockElements (int code); +XML_DEPRECATED +XMLPUBFUN int xmlUCSIsBopomofo (int code); +XML_DEPRECATED +XMLPUBFUN int xmlUCSIsBopomofoExtended (int code); +XML_DEPRECATED +XMLPUBFUN int xmlUCSIsBoxDrawing (int code); +XML_DEPRECATED +XMLPUBFUN int xmlUCSIsBraillePatterns (int code); +XML_DEPRECATED +XMLPUBFUN int xmlUCSIsBuhid (int code); +XML_DEPRECATED +XMLPUBFUN int xmlUCSIsByzantineMusicalSymbols (int code); +XML_DEPRECATED +XMLPUBFUN int xmlUCSIsCJKCompatibility (int code); +XML_DEPRECATED +XMLPUBFUN int xmlUCSIsCJKCompatibilityForms (int code); +XML_DEPRECATED +XMLPUBFUN int xmlUCSIsCJKCompatibilityIdeographs (int code); +XML_DEPRECATED +XMLPUBFUN int xmlUCSIsCJKCompatibilityIdeographsSupplement (int code); +XML_DEPRECATED +XMLPUBFUN int xmlUCSIsCJKRadicalsSupplement (int code); +XML_DEPRECATED +XMLPUBFUN int xmlUCSIsCJKSymbolsandPunctuation (int code); +XML_DEPRECATED +XMLPUBFUN int xmlUCSIsCJKUnifiedIdeographs (int code); +XML_DEPRECATED +XMLPUBFUN int xmlUCSIsCJKUnifiedIdeographsExtensionA (int code); +XML_DEPRECATED +XMLPUBFUN int xmlUCSIsCJKUnifiedIdeographsExtensionB (int code); +XML_DEPRECATED +XMLPUBFUN int xmlUCSIsCherokee (int code); +XML_DEPRECATED +XMLPUBFUN int xmlUCSIsCombiningDiacriticalMarks (int code); +XML_DEPRECATED +XMLPUBFUN int xmlUCSIsCombiningDiacriticalMarksforSymbols (int code); +XML_DEPRECATED +XMLPUBFUN int xmlUCSIsCombiningHalfMarks (int code); +XML_DEPRECATED +XMLPUBFUN int xmlUCSIsCombiningMarksforSymbols (int code); +XML_DEPRECATED +XMLPUBFUN int xmlUCSIsControlPictures (int code); +XML_DEPRECATED +XMLPUBFUN int xmlUCSIsCurrencySymbols (int code); +XML_DEPRECATED +XMLPUBFUN int xmlUCSIsCypriotSyllabary (int code); +XML_DEPRECATED +XMLPUBFUN int xmlUCSIsCyrillic (int code); +XML_DEPRECATED +XMLPUBFUN int xmlUCSIsCyrillicSupplement (int code); +XML_DEPRECATED +XMLPUBFUN int xmlUCSIsDeseret (int code); +XML_DEPRECATED +XMLPUBFUN int xmlUCSIsDevanagari (int code); +XML_DEPRECATED +XMLPUBFUN int xmlUCSIsDingbats (int code); +XML_DEPRECATED +XMLPUBFUN int xmlUCSIsEnclosedAlphanumerics (int code); +XML_DEPRECATED +XMLPUBFUN int xmlUCSIsEnclosedCJKLettersandMonths (int code); +XML_DEPRECATED +XMLPUBFUN int xmlUCSIsEthiopic (int code); +XML_DEPRECATED +XMLPUBFUN int xmlUCSIsGeneralPunctuation (int code); +XML_DEPRECATED +XMLPUBFUN int xmlUCSIsGeometricShapes (int code); +XML_DEPRECATED +XMLPUBFUN int xmlUCSIsGeorgian (int code); +XML_DEPRECATED +XMLPUBFUN int xmlUCSIsGothic (int code); +XML_DEPRECATED +XMLPUBFUN int xmlUCSIsGreek (int code); +XML_DEPRECATED +XMLPUBFUN int xmlUCSIsGreekExtended (int code); +XML_DEPRECATED +XMLPUBFUN int xmlUCSIsGreekandCoptic (int 
code); +XML_DEPRECATED +XMLPUBFUN int xmlUCSIsGujarati (int code); +XML_DEPRECATED +XMLPUBFUN int xmlUCSIsGurmukhi (int code); +XML_DEPRECATED +XMLPUBFUN int xmlUCSIsHalfwidthandFullwidthForms (int code); +XML_DEPRECATED +XMLPUBFUN int xmlUCSIsHangulCompatibilityJamo (int code); +XML_DEPRECATED +XMLPUBFUN int xmlUCSIsHangulJamo (int code); +XML_DEPRECATED +XMLPUBFUN int xmlUCSIsHangulSyllables (int code); +XML_DEPRECATED +XMLPUBFUN int xmlUCSIsHanunoo (int code); +XML_DEPRECATED +XMLPUBFUN int xmlUCSIsHebrew (int code); +XML_DEPRECATED +XMLPUBFUN int xmlUCSIsHighPrivateUseSurrogates (int code); +XML_DEPRECATED +XMLPUBFUN int xmlUCSIsHighSurrogates (int code); +XML_DEPRECATED +XMLPUBFUN int xmlUCSIsHiragana (int code); +XML_DEPRECATED +XMLPUBFUN int xmlUCSIsIPAExtensions (int code); +XML_DEPRECATED +XMLPUBFUN int xmlUCSIsIdeographicDescriptionCharacters (int code); +XML_DEPRECATED +XMLPUBFUN int xmlUCSIsKanbun (int code); +XML_DEPRECATED +XMLPUBFUN int xmlUCSIsKangxiRadicals (int code); +XML_DEPRECATED +XMLPUBFUN int xmlUCSIsKannada (int code); +XML_DEPRECATED +XMLPUBFUN int xmlUCSIsKatakana (int code); +XML_DEPRECATED +XMLPUBFUN int xmlUCSIsKatakanaPhoneticExtensions (int code); +XML_DEPRECATED +XMLPUBFUN int xmlUCSIsKhmer (int code); +XML_DEPRECATED +XMLPUBFUN int xmlUCSIsKhmerSymbols (int code); +XML_DEPRECATED +XMLPUBFUN int xmlUCSIsLao (int code); +XML_DEPRECATED +XMLPUBFUN int xmlUCSIsLatin1Supplement (int code); +XML_DEPRECATED +XMLPUBFUN int xmlUCSIsLatinExtendedA (int code); +XML_DEPRECATED +XMLPUBFUN int xmlUCSIsLatinExtendedB (int code); +XML_DEPRECATED +XMLPUBFUN int xmlUCSIsLatinExtendedAdditional (int code); +XML_DEPRECATED +XMLPUBFUN int xmlUCSIsLetterlikeSymbols (int code); +XML_DEPRECATED +XMLPUBFUN int xmlUCSIsLimbu (int code); +XML_DEPRECATED +XMLPUBFUN int xmlUCSIsLinearBIdeograms (int code); +XML_DEPRECATED +XMLPUBFUN int xmlUCSIsLinearBSyllabary (int code); +XML_DEPRECATED +XMLPUBFUN int xmlUCSIsLowSurrogates (int code); +XML_DEPRECATED +XMLPUBFUN int xmlUCSIsMalayalam (int code); +XML_DEPRECATED +XMLPUBFUN int xmlUCSIsMathematicalAlphanumericSymbols (int code); +XML_DEPRECATED +XMLPUBFUN int xmlUCSIsMathematicalOperators (int code); +XML_DEPRECATED +XMLPUBFUN int xmlUCSIsMiscellaneousMathematicalSymbolsA (int code); +XML_DEPRECATED +XMLPUBFUN int xmlUCSIsMiscellaneousMathematicalSymbolsB (int code); +XML_DEPRECATED +XMLPUBFUN int xmlUCSIsMiscellaneousSymbols (int code); +XML_DEPRECATED +XMLPUBFUN int xmlUCSIsMiscellaneousSymbolsandArrows (int code); +XML_DEPRECATED +XMLPUBFUN int xmlUCSIsMiscellaneousTechnical (int code); +XML_DEPRECATED +XMLPUBFUN int xmlUCSIsMongolian (int code); +XML_DEPRECATED +XMLPUBFUN int xmlUCSIsMusicalSymbols (int code); +XML_DEPRECATED +XMLPUBFUN int xmlUCSIsMyanmar (int code); +XML_DEPRECATED +XMLPUBFUN int xmlUCSIsNumberForms (int code); +XML_DEPRECATED +XMLPUBFUN int xmlUCSIsOgham (int code); +XML_DEPRECATED +XMLPUBFUN int xmlUCSIsOldItalic (int code); +XML_DEPRECATED +XMLPUBFUN int xmlUCSIsOpticalCharacterRecognition (int code); +XML_DEPRECATED +XMLPUBFUN int xmlUCSIsOriya (int code); +XML_DEPRECATED +XMLPUBFUN int xmlUCSIsOsmanya (int code); +XML_DEPRECATED +XMLPUBFUN int xmlUCSIsPhoneticExtensions (int code); +XML_DEPRECATED +XMLPUBFUN int xmlUCSIsPrivateUse (int code); +XML_DEPRECATED +XMLPUBFUN int xmlUCSIsPrivateUseArea (int code); +XML_DEPRECATED +XMLPUBFUN int xmlUCSIsRunic (int code); +XML_DEPRECATED +XMLPUBFUN int xmlUCSIsShavian (int code); +XML_DEPRECATED +XMLPUBFUN int xmlUCSIsSinhala (int code); +XML_DEPRECATED +XMLPUBFUN 
int xmlUCSIsSmallFormVariants (int code); +XML_DEPRECATED +XMLPUBFUN int xmlUCSIsSpacingModifierLetters (int code); +XML_DEPRECATED +XMLPUBFUN int xmlUCSIsSpecials (int code); +XML_DEPRECATED +XMLPUBFUN int xmlUCSIsSuperscriptsandSubscripts (int code); +XML_DEPRECATED +XMLPUBFUN int xmlUCSIsSupplementalArrowsA (int code); +XML_DEPRECATED +XMLPUBFUN int xmlUCSIsSupplementalArrowsB (int code); +XML_DEPRECATED +XMLPUBFUN int xmlUCSIsSupplementalMathematicalOperators (int code); +XML_DEPRECATED +XMLPUBFUN int xmlUCSIsSupplementaryPrivateUseAreaA (int code); +XML_DEPRECATED +XMLPUBFUN int xmlUCSIsSupplementaryPrivateUseAreaB (int code); +XML_DEPRECATED +XMLPUBFUN int xmlUCSIsSyriac (int code); +XML_DEPRECATED +XMLPUBFUN int xmlUCSIsTagalog (int code); +XML_DEPRECATED +XMLPUBFUN int xmlUCSIsTagbanwa (int code); +XML_DEPRECATED +XMLPUBFUN int xmlUCSIsTags (int code); +XML_DEPRECATED +XMLPUBFUN int xmlUCSIsTaiLe (int code); +XML_DEPRECATED +XMLPUBFUN int xmlUCSIsTaiXuanJingSymbols (int code); +XML_DEPRECATED +XMLPUBFUN int xmlUCSIsTamil (int code); +XML_DEPRECATED +XMLPUBFUN int xmlUCSIsTelugu (int code); +XML_DEPRECATED +XMLPUBFUN int xmlUCSIsThaana (int code); +XML_DEPRECATED +XMLPUBFUN int xmlUCSIsThai (int code); +XML_DEPRECATED +XMLPUBFUN int xmlUCSIsTibetan (int code); +XML_DEPRECATED +XMLPUBFUN int xmlUCSIsUgaritic (int code); +XML_DEPRECATED +XMLPUBFUN int xmlUCSIsUnifiedCanadianAboriginalSyllabics (int code); +XML_DEPRECATED +XMLPUBFUN int xmlUCSIsVariationSelectors (int code); +XML_DEPRECATED +XMLPUBFUN int xmlUCSIsVariationSelectorsSupplement (int code); +XML_DEPRECATED +XMLPUBFUN int xmlUCSIsYiRadicals (int code); +XML_DEPRECATED +XMLPUBFUN int xmlUCSIsYiSyllables (int code); +XML_DEPRECATED +XMLPUBFUN int xmlUCSIsYijingHexagramSymbols (int code); + +XMLPUBFUN int xmlUCSIsBlock (int code, const char *block); + +XML_DEPRECATED +XMLPUBFUN int xmlUCSIsCatC (int code); +XML_DEPRECATED +XMLPUBFUN int xmlUCSIsCatCc (int code); +XML_DEPRECATED +XMLPUBFUN int xmlUCSIsCatCf (int code); +XML_DEPRECATED +XMLPUBFUN int xmlUCSIsCatCo (int code); +XML_DEPRECATED +XMLPUBFUN int xmlUCSIsCatCs (int code); +XML_DEPRECATED +XMLPUBFUN int xmlUCSIsCatL (int code); +XML_DEPRECATED +XMLPUBFUN int xmlUCSIsCatLl (int code); +XML_DEPRECATED +XMLPUBFUN int xmlUCSIsCatLm (int code); +XML_DEPRECATED +XMLPUBFUN int xmlUCSIsCatLo (int code); +XML_DEPRECATED +XMLPUBFUN int xmlUCSIsCatLt (int code); +XML_DEPRECATED +XMLPUBFUN int xmlUCSIsCatLu (int code); +XML_DEPRECATED +XMLPUBFUN int xmlUCSIsCatM (int code); +XML_DEPRECATED +XMLPUBFUN int xmlUCSIsCatMc (int code); +XML_DEPRECATED +XMLPUBFUN int xmlUCSIsCatMe (int code); +XML_DEPRECATED +XMLPUBFUN int xmlUCSIsCatMn (int code); +XML_DEPRECATED +XMLPUBFUN int xmlUCSIsCatN (int code); +XML_DEPRECATED +XMLPUBFUN int xmlUCSIsCatNd (int code); +XML_DEPRECATED +XMLPUBFUN int xmlUCSIsCatNl (int code); +XML_DEPRECATED +XMLPUBFUN int xmlUCSIsCatNo (int code); +XML_DEPRECATED +XMLPUBFUN int xmlUCSIsCatP (int code); +XML_DEPRECATED +XMLPUBFUN int xmlUCSIsCatPc (int code); +XML_DEPRECATED +XMLPUBFUN int xmlUCSIsCatPd (int code); +XML_DEPRECATED +XMLPUBFUN int xmlUCSIsCatPe (int code); +XML_DEPRECATED +XMLPUBFUN int xmlUCSIsCatPf (int code); +XML_DEPRECATED +XMLPUBFUN int xmlUCSIsCatPi (int code); +XML_DEPRECATED +XMLPUBFUN int xmlUCSIsCatPo (int code); +XML_DEPRECATED +XMLPUBFUN int xmlUCSIsCatPs (int code); +XML_DEPRECATED +XMLPUBFUN int xmlUCSIsCatS (int code); +XML_DEPRECATED +XMLPUBFUN int xmlUCSIsCatSc (int code); +XML_DEPRECATED +XMLPUBFUN int xmlUCSIsCatSk (int code); 
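The long run of deprecated per-block and per-category predicates in this header is subsumed by the two generic entry points declared here, xmlUCSIsBlock() and xmlUCSIsCat(), which take a UCS-4 code point plus a block or category name and return non-zero on a match. A minimal usage sketch follows (not part of the vendored header; the spaceless block string "BasicLatin" and the helper name classify_codepoint are assumptions, while "Lu" and "Nd" are the standard Unicode general-category names):

#include <libxml/xmlunicode.h>

/* Classify an already-decoded UCS-4 code point:
 * returns 1 for an ASCII uppercase letter, 2 for an ASCII digit, 0 otherwise. */
static int classify_codepoint(int code)
{
    if (!xmlUCSIsBlock(code, "BasicLatin"))   /* assumed spaceless block name */
        return 0;
    if (xmlUCSIsCat(code, "Lu"))              /* general category: uppercase letter */
        return 1;
    if (xmlUCSIsCat(code, "Nd"))              /* general category: decimal digit */
        return 2;
    return 0;
}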
+XML_DEPRECATED +XMLPUBFUN int xmlUCSIsCatSm (int code); +XML_DEPRECATED +XMLPUBFUN int xmlUCSIsCatSo (int code); +XML_DEPRECATED +XMLPUBFUN int xmlUCSIsCatZ (int code); +XML_DEPRECATED +XMLPUBFUN int xmlUCSIsCatZl (int code); +XML_DEPRECATED +XMLPUBFUN int xmlUCSIsCatZp (int code); +XML_DEPRECATED +XMLPUBFUN int xmlUCSIsCatZs (int code); + +XMLPUBFUN int xmlUCSIsCat (int code, const char *cat); + +#ifdef __cplusplus +} +#endif + +#endif /* LIBXML_UNICODE_ENABLED */ + +#endif /* __XML_UNICODE_H__ */ diff --git a/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/include/libxml2/libxml/xmlversion.h b/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/include/libxml2/libxml/xmlversion.h new file mode 100644 index 000000000..4351478d8 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/include/libxml2/libxml/xmlversion.h @@ -0,0 +1,347 @@ +/* + * Summary: compile-time version information + * Description: compile-time version information for the XML library + * + * Copy: See Copyright for the status of this software. + * + * Author: Daniel Veillard + */ + +#ifndef __XML_VERSION_H__ +#define __XML_VERSION_H__ + +/** + * LIBXML_DOTTED_VERSION: + * + * the version string like "1.2.3" + */ +#define LIBXML_DOTTED_VERSION "2.13.8" + +/** + * LIBXML_VERSION: + * + * the version number: 1.2.3 value is 10203 + */ +#define LIBXML_VERSION 21308 + +/** + * LIBXML_VERSION_STRING: + * + * the version number string, 1.2.3 value is "10203" + */ +#define LIBXML_VERSION_STRING "21308" + +/** + * LIBXML_VERSION_EXTRA: + * + * extra version information, used to show a git commit description + */ +#define LIBXML_VERSION_EXTRA "" + +/** + * LIBXML_TEST_VERSION: + * + * Macro to check that the libxml version in use is compatible with + * the version the software has been compiled against + */ +#define LIBXML_TEST_VERSION xmlCheckVersion(21308); + +/** + * LIBXML_THREAD_ENABLED: + * + * Whether the thread support is configured in + */ +#if 1 +#define LIBXML_THREAD_ENABLED +#endif + +/** + * LIBXML_THREAD_ALLOC_ENABLED: + * + * Whether the allocation hooks are per-thread + */ +#if 0 +#define LIBXML_THREAD_ALLOC_ENABLED +#endif + +/** + * LIBXML_TREE_ENABLED: + * + * Whether the DOM like tree manipulation API support is configured in + */ +#if 1 +#define LIBXML_TREE_ENABLED +#endif + +/** + * LIBXML_OUTPUT_ENABLED: + * + * Whether the serialization/saving support is configured in + */ +#if 1 +#define LIBXML_OUTPUT_ENABLED +#endif + +/** + * LIBXML_PUSH_ENABLED: + * + * Whether the push parsing interfaces are configured in + */ +#if 1 +#define LIBXML_PUSH_ENABLED +#endif + +/** + * LIBXML_READER_ENABLED: + * + * Whether the xmlReader parsing interface is configured in + */ +#if 1 +#define LIBXML_READER_ENABLED +#endif + +/** + * LIBXML_PATTERN_ENABLED: + * + * Whether the xmlPattern node selection interface is configured in + */ +#if 1 +#define LIBXML_PATTERN_ENABLED +#endif + +/** + * LIBXML_WRITER_ENABLED: + * + * Whether the xmlWriter saving interface is configured in + */ +#if 1 +#define LIBXML_WRITER_ENABLED +#endif + +/** + * LIBXML_SAX1_ENABLED: + * + * Whether the older SAX1 interface is configured in + */ +#if 1 +#define LIBXML_SAX1_ENABLED +#endif + +/** + * LIBXML_FTP_ENABLED: + * + * Whether the FTP support is configured in + */ +#if 0 +#define LIBXML_FTP_ENABLED +#endif + +/** + * LIBXML_HTTP_ENABLED: + * + * Whether the HTTP support is configured in + */ +#if 1 +#define LIBXML_HTTP_ENABLED +#endif + +/** + * 
LIBXML_VALID_ENABLED: + * + * Whether the DTD validation support is configured in + */ +#if 1 +#define LIBXML_VALID_ENABLED +#endif + +/** + * LIBXML_HTML_ENABLED: + * + * Whether the HTML support is configured in + */ +#if 1 +#define LIBXML_HTML_ENABLED +#endif + +/** + * LIBXML_LEGACY_ENABLED: + * + * Whether the deprecated APIs are compiled in for compatibility + */ +#if 1 +#define LIBXML_LEGACY_ENABLED +#endif + +/** + * LIBXML_C14N_ENABLED: + * + * Whether the Canonicalization support is configured in + */ +#if 1 +#define LIBXML_C14N_ENABLED +#endif + +/** + * LIBXML_CATALOG_ENABLED: + * + * Whether the Catalog support is configured in + */ +#if 1 +#define LIBXML_CATALOG_ENABLED +#endif + +/** + * LIBXML_XPATH_ENABLED: + * + * Whether XPath is configured in + */ +#if 1 +#define LIBXML_XPATH_ENABLED +#endif + +/** + * LIBXML_XPTR_ENABLED: + * + * Whether XPointer is configured in + */ +#if 1 +#define LIBXML_XPTR_ENABLED +#endif + +/** + * LIBXML_XPTR_LOCS_ENABLED: + * + * Whether support for XPointer locations is configured in + */ +#if 0 +#define LIBXML_XPTR_LOCS_ENABLED +#endif + +/** + * LIBXML_XINCLUDE_ENABLED: + * + * Whether XInclude is configured in + */ +#if 1 +#define LIBXML_XINCLUDE_ENABLED +#endif + +/** + * LIBXML_ICONV_ENABLED: + * + * Whether iconv support is available + */ +#if 1 +#define LIBXML_ICONV_ENABLED +#endif + +/** + * LIBXML_ICU_ENABLED: + * + * Whether icu support is available + */ +#if 0 +#define LIBXML_ICU_ENABLED +#endif + +/** + * LIBXML_ISO8859X_ENABLED: + * + * Whether ISO-8859-* support is made available in case iconv is not + */ +#if 1 +#define LIBXML_ISO8859X_ENABLED +#endif + +/** + * LIBXML_DEBUG_ENABLED: + * + * Whether Debugging module is configured in + */ +#if 1 +#define LIBXML_DEBUG_ENABLED +#endif + +/** + * LIBXML_UNICODE_ENABLED: + * + * Whether the Unicode related interfaces are compiled in + */ +#if 1 +#define LIBXML_UNICODE_ENABLED +#endif + +/** + * LIBXML_REGEXP_ENABLED: + * + * Whether the regular expressions interfaces are compiled in + */ +#if 1 +#define LIBXML_REGEXP_ENABLED +#endif + +/** + * LIBXML_AUTOMATA_ENABLED: + * + * Whether the automata interfaces are compiled in + */ +#if 1 +#define LIBXML_AUTOMATA_ENABLED +#endif + +/** + * LIBXML_SCHEMAS_ENABLED: + * + * Whether the Schemas validation interfaces are compiled in + */ +#if 1 +#define LIBXML_SCHEMAS_ENABLED +#endif + +/** + * LIBXML_SCHEMATRON_ENABLED: + * + * Whether the Schematron validation interfaces are compiled in + */ +#if 1 +#define LIBXML_SCHEMATRON_ENABLED +#endif + +/** + * LIBXML_MODULES_ENABLED: + * + * Whether the module interfaces are compiled in + */ +#if 1 +#define LIBXML_MODULES_ENABLED +/** + * LIBXML_MODULE_EXTENSION: + * + * the string suffix used by dynamic modules (usually shared libraries) + */ +#define LIBXML_MODULE_EXTENSION ".so" +#endif + +/** + * LIBXML_ZLIB_ENABLED: + * + * Whether the Zlib support is compiled in + */ +#if 1 +#define LIBXML_ZLIB_ENABLED +#endif + +/** + * LIBXML_LZMA_ENABLED: + * + * Whether the Lzma support is compiled in + */ +#if 0 +#define LIBXML_LZMA_ENABLED +#endif + +#include + +#endif + + diff --git a/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/include/libxml2/libxml/xmlwriter.h b/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/include/libxml2/libxml/xmlwriter.h new file mode 100644 index 000000000..55f88bc71 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/include/libxml2/libxml/xmlwriter.h @@ -0,0 +1,489 @@ +/* 
+ * Summary: text writing API for XML + * Description: text writing API for XML + * + * Copy: See Copyright for the status of this software. + * + * Author: Alfred Mickautsch + */ + +#ifndef __XML_XMLWRITER_H__ +#define __XML_XMLWRITER_H__ + +#include + +#ifdef LIBXML_WRITER_ENABLED + +#include +#include +#include +#include + +#ifdef __cplusplus +extern "C" { +#endif + + typedef struct _xmlTextWriter xmlTextWriter; + typedef xmlTextWriter *xmlTextWriterPtr; + +/* + * Constructors & Destructor + */ + XMLPUBFUN xmlTextWriterPtr + xmlNewTextWriter(xmlOutputBufferPtr out); + XMLPUBFUN xmlTextWriterPtr + xmlNewTextWriterFilename(const char *uri, int compression); + XMLPUBFUN xmlTextWriterPtr + xmlNewTextWriterMemory(xmlBufferPtr buf, int compression); + XMLPUBFUN xmlTextWriterPtr + xmlNewTextWriterPushParser(xmlParserCtxtPtr ctxt, int compression); + XMLPUBFUN xmlTextWriterPtr + xmlNewTextWriterDoc(xmlDocPtr * doc, int compression); + XMLPUBFUN xmlTextWriterPtr + xmlNewTextWriterTree(xmlDocPtr doc, xmlNodePtr node, + int compression); + XMLPUBFUN void xmlFreeTextWriter(xmlTextWriterPtr writer); + +/* + * Functions + */ + + +/* + * Document + */ + XMLPUBFUN int + xmlTextWriterStartDocument(xmlTextWriterPtr writer, + const char *version, + const char *encoding, + const char *standalone); + XMLPUBFUN int xmlTextWriterEndDocument(xmlTextWriterPtr + writer); + +/* + * Comments + */ + XMLPUBFUN int xmlTextWriterStartComment(xmlTextWriterPtr + writer); + XMLPUBFUN int xmlTextWriterEndComment(xmlTextWriterPtr writer); + XMLPUBFUN int + xmlTextWriterWriteFormatComment(xmlTextWriterPtr writer, + const char *format, ...) + LIBXML_ATTR_FORMAT(2,3); + XMLPUBFUN int + xmlTextWriterWriteVFormatComment(xmlTextWriterPtr writer, + const char *format, + va_list argptr) + LIBXML_ATTR_FORMAT(2,0); + XMLPUBFUN int xmlTextWriterWriteComment(xmlTextWriterPtr + writer, + const xmlChar * + content); + +/* + * Elements + */ + XMLPUBFUN int + xmlTextWriterStartElement(xmlTextWriterPtr writer, + const xmlChar * name); + XMLPUBFUN int xmlTextWriterStartElementNS(xmlTextWriterPtr + writer, + const xmlChar * + prefix, + const xmlChar * name, + const xmlChar * + namespaceURI); + XMLPUBFUN int xmlTextWriterEndElement(xmlTextWriterPtr writer); + XMLPUBFUN int xmlTextWriterFullEndElement(xmlTextWriterPtr + writer); + +/* + * Elements conveniency functions + */ + XMLPUBFUN int + xmlTextWriterWriteFormatElement(xmlTextWriterPtr writer, + const xmlChar * name, + const char *format, ...) + LIBXML_ATTR_FORMAT(3,4); + XMLPUBFUN int + xmlTextWriterWriteVFormatElement(xmlTextWriterPtr writer, + const xmlChar * name, + const char *format, + va_list argptr) + LIBXML_ATTR_FORMAT(3,0); + XMLPUBFUN int xmlTextWriterWriteElement(xmlTextWriterPtr + writer, + const xmlChar * name, + const xmlChar * + content); + XMLPUBFUN int + xmlTextWriterWriteFormatElementNS(xmlTextWriterPtr writer, + const xmlChar * prefix, + const xmlChar * name, + const xmlChar * namespaceURI, + const char *format, ...) + LIBXML_ATTR_FORMAT(5,6); + XMLPUBFUN int + xmlTextWriterWriteVFormatElementNS(xmlTextWriterPtr writer, + const xmlChar * prefix, + const xmlChar * name, + const xmlChar * namespaceURI, + const char *format, + va_list argptr) + LIBXML_ATTR_FORMAT(5,0); + XMLPUBFUN int xmlTextWriterWriteElementNS(xmlTextWriterPtr + writer, + const xmlChar * + prefix, + const xmlChar * name, + const xmlChar * + namespaceURI, + const xmlChar * + content); + +/* + * Text + */ + XMLPUBFUN int + xmlTextWriterWriteFormatRaw(xmlTextWriterPtr writer, + const char *format, ...) 
+ LIBXML_ATTR_FORMAT(2,3); + XMLPUBFUN int + xmlTextWriterWriteVFormatRaw(xmlTextWriterPtr writer, + const char *format, va_list argptr) + LIBXML_ATTR_FORMAT(2,0); + XMLPUBFUN int + xmlTextWriterWriteRawLen(xmlTextWriterPtr writer, + const xmlChar * content, int len); + XMLPUBFUN int + xmlTextWriterWriteRaw(xmlTextWriterPtr writer, + const xmlChar * content); + XMLPUBFUN int xmlTextWriterWriteFormatString(xmlTextWriterPtr + writer, + const char + *format, ...) + LIBXML_ATTR_FORMAT(2,3); + XMLPUBFUN int xmlTextWriterWriteVFormatString(xmlTextWriterPtr + writer, + const char + *format, + va_list argptr) + LIBXML_ATTR_FORMAT(2,0); + XMLPUBFUN int xmlTextWriterWriteString(xmlTextWriterPtr writer, + const xmlChar * + content); + XMLPUBFUN int xmlTextWriterWriteBase64(xmlTextWriterPtr writer, + const char *data, + int start, int len); + XMLPUBFUN int xmlTextWriterWriteBinHex(xmlTextWriterPtr writer, + const char *data, + int start, int len); + +/* + * Attributes + */ + XMLPUBFUN int + xmlTextWriterStartAttribute(xmlTextWriterPtr writer, + const xmlChar * name); + XMLPUBFUN int xmlTextWriterStartAttributeNS(xmlTextWriterPtr + writer, + const xmlChar * + prefix, + const xmlChar * + name, + const xmlChar * + namespaceURI); + XMLPUBFUN int xmlTextWriterEndAttribute(xmlTextWriterPtr + writer); + +/* + * Attributes conveniency functions + */ + XMLPUBFUN int + xmlTextWriterWriteFormatAttribute(xmlTextWriterPtr writer, + const xmlChar * name, + const char *format, ...) + LIBXML_ATTR_FORMAT(3,4); + XMLPUBFUN int + xmlTextWriterWriteVFormatAttribute(xmlTextWriterPtr writer, + const xmlChar * name, + const char *format, + va_list argptr) + LIBXML_ATTR_FORMAT(3,0); + XMLPUBFUN int xmlTextWriterWriteAttribute(xmlTextWriterPtr + writer, + const xmlChar * name, + const xmlChar * + content); + XMLPUBFUN int + xmlTextWriterWriteFormatAttributeNS(xmlTextWriterPtr writer, + const xmlChar * prefix, + const xmlChar * name, + const xmlChar * namespaceURI, + const char *format, ...) + LIBXML_ATTR_FORMAT(5,6); + XMLPUBFUN int + xmlTextWriterWriteVFormatAttributeNS(xmlTextWriterPtr writer, + const xmlChar * prefix, + const xmlChar * name, + const xmlChar * namespaceURI, + const char *format, + va_list argptr) + LIBXML_ATTR_FORMAT(5,0); + XMLPUBFUN int xmlTextWriterWriteAttributeNS(xmlTextWriterPtr + writer, + const xmlChar * + prefix, + const xmlChar * + name, + const xmlChar * + namespaceURI, + const xmlChar * + content); + +/* + * PI's + */ + XMLPUBFUN int + xmlTextWriterStartPI(xmlTextWriterPtr writer, + const xmlChar * target); + XMLPUBFUN int xmlTextWriterEndPI(xmlTextWriterPtr writer); + +/* + * PI conveniency functions + */ + XMLPUBFUN int + xmlTextWriterWriteFormatPI(xmlTextWriterPtr writer, + const xmlChar * target, + const char *format, ...) + LIBXML_ATTR_FORMAT(3,4); + XMLPUBFUN int + xmlTextWriterWriteVFormatPI(xmlTextWriterPtr writer, + const xmlChar * target, + const char *format, va_list argptr) + LIBXML_ATTR_FORMAT(3,0); + XMLPUBFUN int + xmlTextWriterWritePI(xmlTextWriterPtr writer, + const xmlChar * target, + const xmlChar * content); + +/** + * xmlTextWriterWriteProcessingInstruction: + * + * This macro maps to xmlTextWriterWritePI + */ +#define xmlTextWriterWriteProcessingInstruction xmlTextWriterWritePI + +/* + * CDATA + */ + XMLPUBFUN int xmlTextWriterStartCDATA(xmlTextWriterPtr writer); + XMLPUBFUN int xmlTextWriterEndCDATA(xmlTextWriterPtr writer); + +/* + * CDATA conveniency functions + */ + XMLPUBFUN int + xmlTextWriterWriteFormatCDATA(xmlTextWriterPtr writer, + const char *format, ...) 
+ LIBXML_ATTR_FORMAT(2,3); + XMLPUBFUN int + xmlTextWriterWriteVFormatCDATA(xmlTextWriterPtr writer, + const char *format, va_list argptr) + LIBXML_ATTR_FORMAT(2,0); + XMLPUBFUN int + xmlTextWriterWriteCDATA(xmlTextWriterPtr writer, + const xmlChar * content); + +/* + * DTD + */ + XMLPUBFUN int + xmlTextWriterStartDTD(xmlTextWriterPtr writer, + const xmlChar * name, + const xmlChar * pubid, + const xmlChar * sysid); + XMLPUBFUN int xmlTextWriterEndDTD(xmlTextWriterPtr writer); + +/* + * DTD conveniency functions + */ + XMLPUBFUN int + xmlTextWriterWriteFormatDTD(xmlTextWriterPtr writer, + const xmlChar * name, + const xmlChar * pubid, + const xmlChar * sysid, + const char *format, ...) + LIBXML_ATTR_FORMAT(5,6); + XMLPUBFUN int + xmlTextWriterWriteVFormatDTD(xmlTextWriterPtr writer, + const xmlChar * name, + const xmlChar * pubid, + const xmlChar * sysid, + const char *format, va_list argptr) + LIBXML_ATTR_FORMAT(5,0); + XMLPUBFUN int + xmlTextWriterWriteDTD(xmlTextWriterPtr writer, + const xmlChar * name, + const xmlChar * pubid, + const xmlChar * sysid, + const xmlChar * subset); + +/** + * xmlTextWriterWriteDocType: + * + * this macro maps to xmlTextWriterWriteDTD + */ +#define xmlTextWriterWriteDocType xmlTextWriterWriteDTD + +/* + * DTD element definition + */ + XMLPUBFUN int + xmlTextWriterStartDTDElement(xmlTextWriterPtr writer, + const xmlChar * name); + XMLPUBFUN int xmlTextWriterEndDTDElement(xmlTextWriterPtr + writer); + +/* + * DTD element definition conveniency functions + */ + XMLPUBFUN int + xmlTextWriterWriteFormatDTDElement(xmlTextWriterPtr writer, + const xmlChar * name, + const char *format, ...) + LIBXML_ATTR_FORMAT(3,4); + XMLPUBFUN int + xmlTextWriterWriteVFormatDTDElement(xmlTextWriterPtr writer, + const xmlChar * name, + const char *format, + va_list argptr) + LIBXML_ATTR_FORMAT(3,0); + XMLPUBFUN int xmlTextWriterWriteDTDElement(xmlTextWriterPtr + writer, + const xmlChar * + name, + const xmlChar * + content); + +/* + * DTD attribute list definition + */ + XMLPUBFUN int + xmlTextWriterStartDTDAttlist(xmlTextWriterPtr writer, + const xmlChar * name); + XMLPUBFUN int xmlTextWriterEndDTDAttlist(xmlTextWriterPtr + writer); + +/* + * DTD attribute list definition conveniency functions + */ + XMLPUBFUN int + xmlTextWriterWriteFormatDTDAttlist(xmlTextWriterPtr writer, + const xmlChar * name, + const char *format, ...) + LIBXML_ATTR_FORMAT(3,4); + XMLPUBFUN int + xmlTextWriterWriteVFormatDTDAttlist(xmlTextWriterPtr writer, + const xmlChar * name, + const char *format, + va_list argptr) + LIBXML_ATTR_FORMAT(3,0); + XMLPUBFUN int xmlTextWriterWriteDTDAttlist(xmlTextWriterPtr + writer, + const xmlChar * + name, + const xmlChar * + content); + +/* + * DTD entity definition + */ + XMLPUBFUN int + xmlTextWriterStartDTDEntity(xmlTextWriterPtr writer, + int pe, const xmlChar * name); + XMLPUBFUN int xmlTextWriterEndDTDEntity(xmlTextWriterPtr + writer); + +/* + * DTD entity definition conveniency functions + */ + XMLPUBFUN int + xmlTextWriterWriteFormatDTDInternalEntity(xmlTextWriterPtr writer, + int pe, + const xmlChar * name, + const char *format, ...) 
+ LIBXML_ATTR_FORMAT(4,5); + XMLPUBFUN int + xmlTextWriterWriteVFormatDTDInternalEntity(xmlTextWriterPtr writer, + int pe, + const xmlChar * name, + const char *format, + va_list argptr) + LIBXML_ATTR_FORMAT(4,0); + XMLPUBFUN int + xmlTextWriterWriteDTDInternalEntity(xmlTextWriterPtr writer, + int pe, + const xmlChar * name, + const xmlChar * content); + XMLPUBFUN int + xmlTextWriterWriteDTDExternalEntity(xmlTextWriterPtr writer, + int pe, + const xmlChar * name, + const xmlChar * pubid, + const xmlChar * sysid, + const xmlChar * ndataid); + XMLPUBFUN int + xmlTextWriterWriteDTDExternalEntityContents(xmlTextWriterPtr + writer, + const xmlChar * pubid, + const xmlChar * sysid, + const xmlChar * + ndataid); + XMLPUBFUN int xmlTextWriterWriteDTDEntity(xmlTextWriterPtr + writer, int pe, + const xmlChar * name, + const xmlChar * + pubid, + const xmlChar * + sysid, + const xmlChar * + ndataid, + const xmlChar * + content); + +/* + * DTD notation definition + */ + XMLPUBFUN int + xmlTextWriterWriteDTDNotation(xmlTextWriterPtr writer, + const xmlChar * name, + const xmlChar * pubid, + const xmlChar * sysid); + +/* + * Indentation + */ + XMLPUBFUN int + xmlTextWriterSetIndent(xmlTextWriterPtr writer, int indent); + XMLPUBFUN int + xmlTextWriterSetIndentString(xmlTextWriterPtr writer, + const xmlChar * str); + + XMLPUBFUN int + xmlTextWriterSetQuoteChar(xmlTextWriterPtr writer, xmlChar quotechar); + + +/* + * misc + */ + XMLPUBFUN int xmlTextWriterFlush(xmlTextWriterPtr writer); + XMLPUBFUN int xmlTextWriterClose(xmlTextWriterPtr writer); + +#ifdef __cplusplus +} +#endif + +#endif /* LIBXML_WRITER_ENABLED */ + +#endif /* __XML_XMLWRITER_H__ */ diff --git a/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/include/libxml2/libxml/xpath.h b/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/include/libxml2/libxml/xpath.h new file mode 100644 index 000000000..b89e105c0 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/include/libxml2/libxml/xpath.h @@ -0,0 +1,579 @@ +/* + * Summary: XML Path Language implementation + * Description: API for the XML Path Language implementation + * + * XML Path Language implementation + * XPath is a language for addressing parts of an XML document, + * designed to be used by both XSLT and XPointer + * http://www.w3.org/TR/xpath + * + * Implements + * W3C Recommendation 16 November 1999 + * http://www.w3.org/TR/1999/REC-xpath-19991116 + * + * Copy: See Copyright for the status of this software. + * + * Author: Daniel Veillard + */ + +#ifndef __XML_XPATH_H__ +#define __XML_XPATH_H__ + +#include + +#ifdef LIBXML_XPATH_ENABLED + +#include +#include +#include +#endif /* LIBXML_XPATH_ENABLED */ + +#if defined(LIBXML_XPATH_ENABLED) || defined(LIBXML_SCHEMAS_ENABLED) +#ifdef __cplusplus +extern "C" { +#endif +#endif /* LIBXML_XPATH_ENABLED or LIBXML_SCHEMAS_ENABLED */ + +#ifdef LIBXML_XPATH_ENABLED + +typedef struct _xmlXPathContext xmlXPathContext; +typedef xmlXPathContext *xmlXPathContextPtr; +typedef struct _xmlXPathParserContext xmlXPathParserContext; +typedef xmlXPathParserContext *xmlXPathParserContextPtr; + +/** + * The set of XPath error codes. 
+ */ + +typedef enum { + XPATH_EXPRESSION_OK = 0, + XPATH_NUMBER_ERROR, + XPATH_UNFINISHED_LITERAL_ERROR, + XPATH_START_LITERAL_ERROR, + XPATH_VARIABLE_REF_ERROR, + XPATH_UNDEF_VARIABLE_ERROR, + XPATH_INVALID_PREDICATE_ERROR, + XPATH_EXPR_ERROR, + XPATH_UNCLOSED_ERROR, + XPATH_UNKNOWN_FUNC_ERROR, + XPATH_INVALID_OPERAND, + XPATH_INVALID_TYPE, + XPATH_INVALID_ARITY, + XPATH_INVALID_CTXT_SIZE, + XPATH_INVALID_CTXT_POSITION, + XPATH_MEMORY_ERROR, + XPTR_SYNTAX_ERROR, + XPTR_RESOURCE_ERROR, + XPTR_SUB_RESOURCE_ERROR, + XPATH_UNDEF_PREFIX_ERROR, + XPATH_ENCODING_ERROR, + XPATH_INVALID_CHAR_ERROR, + XPATH_INVALID_CTXT, + XPATH_STACK_ERROR, + XPATH_FORBID_VARIABLE_ERROR, + XPATH_OP_LIMIT_EXCEEDED, + XPATH_RECURSION_LIMIT_EXCEEDED +} xmlXPathError; + +/* + * A node-set (an unordered collection of nodes without duplicates). + */ +typedef struct _xmlNodeSet xmlNodeSet; +typedef xmlNodeSet *xmlNodeSetPtr; +struct _xmlNodeSet { + int nodeNr; /* number of nodes in the set */ + int nodeMax; /* size of the array as allocated */ + xmlNodePtr *nodeTab; /* array of nodes in no particular order */ + /* @@ with_ns to check whether namespace nodes should be looked at @@ */ +}; + +/* + * An expression is evaluated to yield an object, which + * has one of the following four basic types: + * - node-set + * - boolean + * - number + * - string + * + * @@ XPointer will add more types ! + */ + +typedef enum { + XPATH_UNDEFINED = 0, + XPATH_NODESET = 1, + XPATH_BOOLEAN = 2, + XPATH_NUMBER = 3, + XPATH_STRING = 4, +#ifdef LIBXML_XPTR_LOCS_ENABLED + XPATH_POINT = 5, + XPATH_RANGE = 6, + XPATH_LOCATIONSET = 7, +#endif + XPATH_USERS = 8, + XPATH_XSLT_TREE = 9 /* An XSLT value tree, non modifiable */ +} xmlXPathObjectType; + +#ifndef LIBXML_XPTR_LOCS_ENABLED +/** DOC_DISABLE */ +#define XPATH_POINT 5 +#define XPATH_RANGE 6 +#define XPATH_LOCATIONSET 7 +/** DOC_ENABLE */ +#endif + +typedef struct _xmlXPathObject xmlXPathObject; +typedef xmlXPathObject *xmlXPathObjectPtr; +struct _xmlXPathObject { + xmlXPathObjectType type; + xmlNodeSetPtr nodesetval; + int boolval; + double floatval; + xmlChar *stringval; + void *user; + int index; + void *user2; + int index2; +}; + +/** + * xmlXPathConvertFunc: + * @obj: an XPath object + * @type: the number of the target type + * + * A conversion function is associated to a type and used to cast + * the new type to primitive values. + * + * Returns -1 in case of error, 0 otherwise + */ +typedef int (*xmlXPathConvertFunc) (xmlXPathObjectPtr obj, int type); + +/* + * Extra type: a name and a conversion function. + */ + +typedef struct _xmlXPathType xmlXPathType; +typedef xmlXPathType *xmlXPathTypePtr; +struct _xmlXPathType { + const xmlChar *name; /* the type name */ + xmlXPathConvertFunc func; /* the conversion function */ +}; + +/* + * Extra variable: a name and a value. + */ + +typedef struct _xmlXPathVariable xmlXPathVariable; +typedef xmlXPathVariable *xmlXPathVariablePtr; +struct _xmlXPathVariable { + const xmlChar *name; /* the variable name */ + xmlXPathObjectPtr value; /* the value */ +}; + +/** + * xmlXPathEvalFunc: + * @ctxt: an XPath parser context + * @nargs: the number of arguments passed to the function + * + * An XPath evaluation function, the parameters are on the XPath context stack. + */ + +typedef void (*xmlXPathEvalFunc)(xmlXPathParserContextPtr ctxt, + int nargs); + +/* + * Extra function: a name and a evaluation function. 
+ */ + +typedef struct _xmlXPathFunct xmlXPathFunct; +typedef xmlXPathFunct *xmlXPathFuncPtr; +struct _xmlXPathFunct { + const xmlChar *name; /* the function name */ + xmlXPathEvalFunc func; /* the evaluation function */ +}; + +/** + * xmlXPathAxisFunc: + * @ctxt: the XPath interpreter context + * @cur: the previous node being explored on that axis + * + * An axis traversal function. To traverse an axis, the engine calls + * the first time with cur == NULL and repeat until the function returns + * NULL indicating the end of the axis traversal. + * + * Returns the next node in that axis or NULL if at the end of the axis. + */ + +typedef xmlXPathObjectPtr (*xmlXPathAxisFunc) (xmlXPathParserContextPtr ctxt, + xmlXPathObjectPtr cur); + +/* + * Extra axis: a name and an axis function. + */ + +typedef struct _xmlXPathAxis xmlXPathAxis; +typedef xmlXPathAxis *xmlXPathAxisPtr; +struct _xmlXPathAxis { + const xmlChar *name; /* the axis name */ + xmlXPathAxisFunc func; /* the search function */ +}; + +/** + * xmlXPathFunction: + * @ctxt: the XPath interprestation context + * @nargs: the number of arguments + * + * An XPath function. + * The arguments (if any) are popped out from the context stack + * and the result is pushed on the stack. + */ + +typedef void (*xmlXPathFunction) (xmlXPathParserContextPtr ctxt, int nargs); + +/* + * Function and Variable Lookup. + */ + +/** + * xmlXPathVariableLookupFunc: + * @ctxt: an XPath context + * @name: name of the variable + * @ns_uri: the namespace name hosting this variable + * + * Prototype for callbacks used to plug variable lookup in the XPath + * engine. + * + * Returns the XPath object value or NULL if not found. + */ +typedef xmlXPathObjectPtr (*xmlXPathVariableLookupFunc) (void *ctxt, + const xmlChar *name, + const xmlChar *ns_uri); + +/** + * xmlXPathFuncLookupFunc: + * @ctxt: an XPath context + * @name: name of the function + * @ns_uri: the namespace name hosting this function + * + * Prototype for callbacks used to plug function lookup in the XPath + * engine. + * + * Returns the XPath function or NULL if not found. + */ +typedef xmlXPathFunction (*xmlXPathFuncLookupFunc) (void *ctxt, + const xmlChar *name, + const xmlChar *ns_uri); + +/** + * xmlXPathFlags: + * Flags for XPath engine compilation and runtime + */ +/** + * XML_XPATH_CHECKNS: + * + * check namespaces at compilation + */ +#define XML_XPATH_CHECKNS (1<<0) +/** + * XML_XPATH_NOVAR: + * + * forbid variables in expression + */ +#define XML_XPATH_NOVAR (1<<1) + +/** + * xmlXPathContext: + * + * Expression evaluation occurs with respect to a context. + * he context consists of: + * - a node (the context node) + * - a node list (the context node list) + * - a set of variable bindings + * - a function library + * - the set of namespace declarations in scope for the expression + * Following the switch to hash tables, this need to be trimmed up at + * the next binary incompatible release. + * The node may be modified when the context is passed to libxml2 + * for an XPath evaluation so you may need to initialize it again + * before the next call. 
+ */ + +struct _xmlXPathContext { + xmlDocPtr doc; /* The current document */ + xmlNodePtr node; /* The current node */ + + int nb_variables_unused; /* unused (hash table) */ + int max_variables_unused; /* unused (hash table) */ + xmlHashTablePtr varHash; /* Hash table of defined variables */ + + int nb_types; /* number of defined types */ + int max_types; /* max number of types */ + xmlXPathTypePtr types; /* Array of defined types */ + + int nb_funcs_unused; /* unused (hash table) */ + int max_funcs_unused; /* unused (hash table) */ + xmlHashTablePtr funcHash; /* Hash table of defined funcs */ + + int nb_axis; /* number of defined axis */ + int max_axis; /* max number of axis */ + xmlXPathAxisPtr axis; /* Array of defined axis */ + + /* the namespace nodes of the context node */ + xmlNsPtr *namespaces; /* Array of namespaces */ + int nsNr; /* number of namespace in scope */ + void *user; /* function to free */ + + /* extra variables */ + int contextSize; /* the context size */ + int proximityPosition; /* the proximity position */ + + /* extra stuff for XPointer */ + int xptr; /* is this an XPointer context? */ + xmlNodePtr here; /* for here() */ + xmlNodePtr origin; /* for origin() */ + + /* the set of namespace declarations in scope for the expression */ + xmlHashTablePtr nsHash; /* The namespaces hash table */ + xmlXPathVariableLookupFunc varLookupFunc;/* variable lookup func */ + void *varLookupData; /* variable lookup data */ + + /* Possibility to link in an extra item */ + void *extra; /* needed for XSLT */ + + /* The function name and URI when calling a function */ + const xmlChar *function; + const xmlChar *functionURI; + + /* function lookup function and data */ + xmlXPathFuncLookupFunc funcLookupFunc;/* function lookup func */ + void *funcLookupData; /* function lookup data */ + + /* temporary namespace lists kept for walking the namespace axis */ + xmlNsPtr *tmpNsList; /* Array of namespaces */ + int tmpNsNr; /* number of namespaces in scope */ + + /* error reporting mechanism */ + void *userData; /* user specific data block */ + xmlStructuredErrorFunc error; /* the callback in case of errors */ + xmlError lastError; /* the last error */ + xmlNodePtr debugNode; /* the source node XSLT */ + + /* dictionary */ + xmlDictPtr dict; /* dictionary if any */ + + int flags; /* flags to control compilation */ + + /* Cache for reusal of XPath objects */ + void *cache; + + /* Resource limits */ + unsigned long opLimit; + unsigned long opCount; + int depth; +}; + +/* + * The structure of a compiled expression form is not public. + */ + +typedef struct _xmlXPathCompExpr xmlXPathCompExpr; +typedef xmlXPathCompExpr *xmlXPathCompExprPtr; + +/** + * xmlXPathParserContext: + * + * An XPath parser context. It contains pure parsing information, + * an xmlXPathContext, and the stack of objects. 
+ */ +struct _xmlXPathParserContext { + const xmlChar *cur; /* the current char being parsed */ + const xmlChar *base; /* the full expression */ + + int error; /* error code */ + + xmlXPathContextPtr context; /* the evaluation context */ + xmlXPathObjectPtr value; /* the current value */ + int valueNr; /* number of values stacked */ + int valueMax; /* max number of values stacked */ + xmlXPathObjectPtr *valueTab; /* stack of values */ + + xmlXPathCompExprPtr comp; /* the precompiled expression */ + int xptr; /* it this an XPointer expression */ + xmlNodePtr ancestor; /* used for walking preceding axis */ + + int valueFrame; /* always zero for compatibility */ +}; + +/************************************************************************ + * * + * Public API * + * * + ************************************************************************/ + +/** + * Objects and Nodesets handling + */ + +XMLPUBVAR double xmlXPathNAN; +XMLPUBVAR double xmlXPathPINF; +XMLPUBVAR double xmlXPathNINF; + +/* These macros may later turn into functions */ +/** + * xmlXPathNodeSetGetLength: + * @ns: a node-set + * + * Implement a functionality similar to the DOM NodeList.length. + * + * Returns the number of nodes in the node-set. + */ +#define xmlXPathNodeSetGetLength(ns) ((ns) ? (ns)->nodeNr : 0) +/** + * xmlXPathNodeSetItem: + * @ns: a node-set + * @index: index of a node in the set + * + * Implements a functionality similar to the DOM NodeList.item(). + * + * Returns the xmlNodePtr at the given @index in @ns or NULL if + * @index is out of range (0 to length-1) + */ +#define xmlXPathNodeSetItem(ns, index) \ + ((((ns) != NULL) && \ + ((index) >= 0) && ((index) < (ns)->nodeNr)) ? \ + (ns)->nodeTab[(index)] \ + : NULL) +/** + * xmlXPathNodeSetIsEmpty: + * @ns: a node-set + * + * Checks whether @ns is empty or not. + * + * Returns %TRUE if @ns is an empty node-set. + */ +#define xmlXPathNodeSetIsEmpty(ns) \ + (((ns) == NULL) || ((ns)->nodeNr == 0) || ((ns)->nodeTab == NULL)) + + +XMLPUBFUN void + xmlXPathFreeObject (xmlXPathObjectPtr obj); +XMLPUBFUN xmlNodeSetPtr + xmlXPathNodeSetCreate (xmlNodePtr val); +XMLPUBFUN void + xmlXPathFreeNodeSetList (xmlXPathObjectPtr obj); +XMLPUBFUN void + xmlXPathFreeNodeSet (xmlNodeSetPtr obj); +XMLPUBFUN xmlXPathObjectPtr + xmlXPathObjectCopy (xmlXPathObjectPtr val); +XMLPUBFUN int + xmlXPathCmpNodes (xmlNodePtr node1, + xmlNodePtr node2); +/** + * Conversion functions to basic types. 
+ */ +XMLPUBFUN int + xmlXPathCastNumberToBoolean (double val); +XMLPUBFUN int + xmlXPathCastStringToBoolean (const xmlChar * val); +XMLPUBFUN int + xmlXPathCastNodeSetToBoolean(xmlNodeSetPtr ns); +XMLPUBFUN int + xmlXPathCastToBoolean (xmlXPathObjectPtr val); + +XMLPUBFUN double + xmlXPathCastBooleanToNumber (int val); +XMLPUBFUN double + xmlXPathCastStringToNumber (const xmlChar * val); +XMLPUBFUN double + xmlXPathCastNodeToNumber (xmlNodePtr node); +XMLPUBFUN double + xmlXPathCastNodeSetToNumber (xmlNodeSetPtr ns); +XMLPUBFUN double + xmlXPathCastToNumber (xmlXPathObjectPtr val); + +XMLPUBFUN xmlChar * + xmlXPathCastBooleanToString (int val); +XMLPUBFUN xmlChar * + xmlXPathCastNumberToString (double val); +XMLPUBFUN xmlChar * + xmlXPathCastNodeToString (xmlNodePtr node); +XMLPUBFUN xmlChar * + xmlXPathCastNodeSetToString (xmlNodeSetPtr ns); +XMLPUBFUN xmlChar * + xmlXPathCastToString (xmlXPathObjectPtr val); + +XMLPUBFUN xmlXPathObjectPtr + xmlXPathConvertBoolean (xmlXPathObjectPtr val); +XMLPUBFUN xmlXPathObjectPtr + xmlXPathConvertNumber (xmlXPathObjectPtr val); +XMLPUBFUN xmlXPathObjectPtr + xmlXPathConvertString (xmlXPathObjectPtr val); + +/** + * Context handling. + */ +XMLPUBFUN xmlXPathContextPtr + xmlXPathNewContext (xmlDocPtr doc); +XMLPUBFUN void + xmlXPathFreeContext (xmlXPathContextPtr ctxt); +XMLPUBFUN void + xmlXPathSetErrorHandler(xmlXPathContextPtr ctxt, + xmlStructuredErrorFunc handler, + void *context); +XMLPUBFUN int + xmlXPathContextSetCache(xmlXPathContextPtr ctxt, + int active, + int value, + int options); +/** + * Evaluation functions. + */ +XMLPUBFUN long + xmlXPathOrderDocElems (xmlDocPtr doc); +XMLPUBFUN int + xmlXPathSetContextNode (xmlNodePtr node, + xmlXPathContextPtr ctx); +XMLPUBFUN xmlXPathObjectPtr + xmlXPathNodeEval (xmlNodePtr node, + const xmlChar *str, + xmlXPathContextPtr ctx); +XMLPUBFUN xmlXPathObjectPtr + xmlXPathEval (const xmlChar *str, + xmlXPathContextPtr ctx); +XMLPUBFUN xmlXPathObjectPtr + xmlXPathEvalExpression (const xmlChar *str, + xmlXPathContextPtr ctxt); +XMLPUBFUN int + xmlXPathEvalPredicate (xmlXPathContextPtr ctxt, + xmlXPathObjectPtr res); +/** + * Separate compilation/evaluation entry points. + */ +XMLPUBFUN xmlXPathCompExprPtr + xmlXPathCompile (const xmlChar *str); +XMLPUBFUN xmlXPathCompExprPtr + xmlXPathCtxtCompile (xmlXPathContextPtr ctxt, + const xmlChar *str); +XMLPUBFUN xmlXPathObjectPtr + xmlXPathCompiledEval (xmlXPathCompExprPtr comp, + xmlXPathContextPtr ctx); +XMLPUBFUN int + xmlXPathCompiledEvalToBoolean(xmlXPathCompExprPtr comp, + xmlXPathContextPtr ctxt); +XMLPUBFUN void + xmlXPathFreeCompExpr (xmlXPathCompExprPtr comp); +#endif /* LIBXML_XPATH_ENABLED */ +#if defined(LIBXML_XPATH_ENABLED) || defined(LIBXML_SCHEMAS_ENABLED) +XML_DEPRECATED +XMLPUBFUN void + xmlXPathInit (void); +XMLPUBFUN int + xmlXPathIsNaN (double val); +XMLPUBFUN int + xmlXPathIsInf (double val); + +#ifdef __cplusplus +} +#endif + +#endif /* LIBXML_XPATH_ENABLED or LIBXML_SCHEMAS_ENABLED*/ +#endif /* ! 
__XML_XPATH_H__ */ diff --git a/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/include/libxml2/libxml/xpathInternals.h b/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/include/libxml2/libxml/xpathInternals.h new file mode 100644 index 000000000..d1c90dff2 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/include/libxml2/libxml/xpathInternals.h @@ -0,0 +1,633 @@ +/* + * Summary: internal interfaces for XML Path Language implementation + * Description: internal interfaces for XML Path Language implementation + * used to build new modules on top of XPath like XPointer and + * XSLT + * + * Copy: See Copyright for the status of this software. + * + * Author: Daniel Veillard + */ + +#ifndef __XML_XPATH_INTERNALS_H__ +#define __XML_XPATH_INTERNALS_H__ + +#include +#include +#include + +#ifdef LIBXML_XPATH_ENABLED + +#ifdef __cplusplus +extern "C" { +#endif + +/************************************************************************ + * * + * Helpers * + * * + ************************************************************************/ + +/* + * Many of these macros may later turn into functions. They + * shouldn't be used in #ifdef's preprocessor instructions. + */ +/** + * xmlXPathSetError: + * @ctxt: an XPath parser context + * @err: an xmlXPathError code + * + * Raises an error. + */ +#define xmlXPathSetError(ctxt, err) \ + { xmlXPatherror((ctxt), __FILE__, __LINE__, (err)); \ + if ((ctxt) != NULL) (ctxt)->error = (err); } + +/** + * xmlXPathSetArityError: + * @ctxt: an XPath parser context + * + * Raises an XPATH_INVALID_ARITY error. + */ +#define xmlXPathSetArityError(ctxt) \ + xmlXPathSetError((ctxt), XPATH_INVALID_ARITY) + +/** + * xmlXPathSetTypeError: + * @ctxt: an XPath parser context + * + * Raises an XPATH_INVALID_TYPE error. + */ +#define xmlXPathSetTypeError(ctxt) \ + xmlXPathSetError((ctxt), XPATH_INVALID_TYPE) + +/** + * xmlXPathGetError: + * @ctxt: an XPath parser context + * + * Get the error code of an XPath context. + * + * Returns the context error. + */ +#define xmlXPathGetError(ctxt) ((ctxt)->error) + +/** + * xmlXPathCheckError: + * @ctxt: an XPath parser context + * + * Check if an XPath error was raised. + * + * Returns true if an error has been raised, false otherwise. + */ +#define xmlXPathCheckError(ctxt) ((ctxt)->error != XPATH_EXPRESSION_OK) + +/** + * xmlXPathGetDocument: + * @ctxt: an XPath parser context + * + * Get the document of an XPath context. + * + * Returns the context document. + */ +#define xmlXPathGetDocument(ctxt) ((ctxt)->context->doc) + +/** + * xmlXPathGetContextNode: + * @ctxt: an XPath parser context + * + * Get the context node of an XPath context. + * + * Returns the context node. + */ +#define xmlXPathGetContextNode(ctxt) ((ctxt)->context->node) + +XMLPUBFUN int + xmlXPathPopBoolean (xmlXPathParserContextPtr ctxt); +XMLPUBFUN double + xmlXPathPopNumber (xmlXPathParserContextPtr ctxt); +XMLPUBFUN xmlChar * + xmlXPathPopString (xmlXPathParserContextPtr ctxt); +XMLPUBFUN xmlNodeSetPtr + xmlXPathPopNodeSet (xmlXPathParserContextPtr ctxt); +XMLPUBFUN void * + xmlXPathPopExternal (xmlXPathParserContextPtr ctxt); + +/** + * xmlXPathReturnBoolean: + * @ctxt: an XPath parser context + * @val: a boolean + * + * Pushes the boolean @val on the context stack. + */ +#define xmlXPathReturnBoolean(ctxt, val) \ + valuePush((ctxt), xmlXPathNewBoolean(val)) + +/** + * xmlXPathReturnTrue: + * @ctxt: an XPath parser context + * + * Pushes true on the context stack. 
+ */ +#define xmlXPathReturnTrue(ctxt) xmlXPathReturnBoolean((ctxt), 1) + +/** + * xmlXPathReturnFalse: + * @ctxt: an XPath parser context + * + * Pushes false on the context stack. + */ +#define xmlXPathReturnFalse(ctxt) xmlXPathReturnBoolean((ctxt), 0) + +/** + * xmlXPathReturnNumber: + * @ctxt: an XPath parser context + * @val: a double + * + * Pushes the double @val on the context stack. + */ +#define xmlXPathReturnNumber(ctxt, val) \ + valuePush((ctxt), xmlXPathNewFloat(val)) + +/** + * xmlXPathReturnString: + * @ctxt: an XPath parser context + * @str: a string + * + * Pushes the string @str on the context stack. + */ +#define xmlXPathReturnString(ctxt, str) \ + valuePush((ctxt), xmlXPathWrapString(str)) + +/** + * xmlXPathReturnEmptyString: + * @ctxt: an XPath parser context + * + * Pushes an empty string on the stack. + */ +#define xmlXPathReturnEmptyString(ctxt) \ + valuePush((ctxt), xmlXPathNewCString("")) + +/** + * xmlXPathReturnNodeSet: + * @ctxt: an XPath parser context + * @ns: a node-set + * + * Pushes the node-set @ns on the context stack. + */ +#define xmlXPathReturnNodeSet(ctxt, ns) \ + valuePush((ctxt), xmlXPathWrapNodeSet(ns)) + +/** + * xmlXPathReturnEmptyNodeSet: + * @ctxt: an XPath parser context + * + * Pushes an empty node-set on the context stack. + */ +#define xmlXPathReturnEmptyNodeSet(ctxt) \ + valuePush((ctxt), xmlXPathNewNodeSet(NULL)) + +/** + * xmlXPathReturnExternal: + * @ctxt: an XPath parser context + * @val: user data + * + * Pushes user data on the context stack. + */ +#define xmlXPathReturnExternal(ctxt, val) \ + valuePush((ctxt), xmlXPathWrapExternal(val)) + +/** + * xmlXPathStackIsNodeSet: + * @ctxt: an XPath parser context + * + * Check if the current value on the XPath stack is a node set or + * an XSLT value tree. + * + * Returns true if the current object on the stack is a node-set. + */ +#define xmlXPathStackIsNodeSet(ctxt) \ + (((ctxt)->value != NULL) \ + && (((ctxt)->value->type == XPATH_NODESET) \ + || ((ctxt)->value->type == XPATH_XSLT_TREE))) + +/** + * xmlXPathStackIsExternal: + * @ctxt: an XPath parser context + * + * Checks if the current value on the XPath stack is an external + * object. + * + * Returns true if the current object on the stack is an external + * object. + */ +#define xmlXPathStackIsExternal(ctxt) \ + ((ctxt->value != NULL) && (ctxt->value->type == XPATH_USERS)) + +/** + * xmlXPathEmptyNodeSet: + * @ns: a node-set + * + * Empties a node-set. + */ +#define xmlXPathEmptyNodeSet(ns) \ + { while ((ns)->nodeNr > 0) (ns)->nodeTab[--(ns)->nodeNr] = NULL; } + +/** + * CHECK_ERROR: + * + * Macro to return from the function if an XPath error was detected. + */ +#define CHECK_ERROR \ + if (ctxt->error != XPATH_EXPRESSION_OK) return + +/** + * CHECK_ERROR0: + * + * Macro to return 0 from the function if an XPath error was detected. + */ +#define CHECK_ERROR0 \ + if (ctxt->error != XPATH_EXPRESSION_OK) return(0) + +/** + * XP_ERROR: + * @X: the error code + * + * Macro to raise an XPath error and return. + */ +#define XP_ERROR(X) \ + { xmlXPathErr(ctxt, X); return; } + +/** + * XP_ERROR0: + * @X: the error code + * + * Macro to raise an XPath error and return 0. + */ +#define XP_ERROR0(X) \ + { xmlXPathErr(ctxt, X); return(0); } + +/** + * CHECK_TYPE: + * @typeval: the XPath type + * + * Macro to check that the value on top of the XPath stack is of a given + * type. 
+ */ +#define CHECK_TYPE(typeval) \ + if ((ctxt->value == NULL) || (ctxt->value->type != typeval)) \ + XP_ERROR(XPATH_INVALID_TYPE) + +/** + * CHECK_TYPE0: + * @typeval: the XPath type + * + * Macro to check that the value on top of the XPath stack is of a given + * type. Return(0) in case of failure + */ +#define CHECK_TYPE0(typeval) \ + if ((ctxt->value == NULL) || (ctxt->value->type != typeval)) \ + XP_ERROR0(XPATH_INVALID_TYPE) + +/** + * CHECK_ARITY: + * @x: the number of expected args + * + * Macro to check that the number of args passed to an XPath function matches. + */ +#define CHECK_ARITY(x) \ + if (ctxt == NULL) return; \ + if (nargs != (x)) \ + XP_ERROR(XPATH_INVALID_ARITY); \ + if (ctxt->valueNr < (x)) \ + XP_ERROR(XPATH_STACK_ERROR); + +/** + * CAST_TO_STRING: + * + * Macro to try to cast the value on the top of the XPath stack to a string. + */ +#define CAST_TO_STRING \ + if ((ctxt->value != NULL) && (ctxt->value->type != XPATH_STRING)) \ + xmlXPathStringFunction(ctxt, 1); + +/** + * CAST_TO_NUMBER: + * + * Macro to try to cast the value on the top of the XPath stack to a number. + */ +#define CAST_TO_NUMBER \ + if ((ctxt->value != NULL) && (ctxt->value->type != XPATH_NUMBER)) \ + xmlXPathNumberFunction(ctxt, 1); + +/** + * CAST_TO_BOOLEAN: + * + * Macro to try to cast the value on the top of the XPath stack to a boolean. + */ +#define CAST_TO_BOOLEAN \ + if ((ctxt->value != NULL) && (ctxt->value->type != XPATH_BOOLEAN)) \ + xmlXPathBooleanFunction(ctxt, 1); + +/* + * Variable Lookup forwarding. + */ + +XMLPUBFUN void + xmlXPathRegisterVariableLookup (xmlXPathContextPtr ctxt, + xmlXPathVariableLookupFunc f, + void *data); + +/* + * Function Lookup forwarding. + */ + +XMLPUBFUN void + xmlXPathRegisterFuncLookup (xmlXPathContextPtr ctxt, + xmlXPathFuncLookupFunc f, + void *funcCtxt); + +/* + * Error reporting. + */ +XMLPUBFUN void + xmlXPatherror (xmlXPathParserContextPtr ctxt, + const char *file, + int line, + int no); + +XMLPUBFUN void + xmlXPathErr (xmlXPathParserContextPtr ctxt, + int error); + +#ifdef LIBXML_DEBUG_ENABLED +XMLPUBFUN void + xmlXPathDebugDumpObject (FILE *output, + xmlXPathObjectPtr cur, + int depth); +XMLPUBFUN void + xmlXPathDebugDumpCompExpr(FILE *output, + xmlXPathCompExprPtr comp, + int depth); +#endif +/** + * NodeSet handling. 
+ */ +XMLPUBFUN int + xmlXPathNodeSetContains (xmlNodeSetPtr cur, + xmlNodePtr val); +XMLPUBFUN xmlNodeSetPtr + xmlXPathDifference (xmlNodeSetPtr nodes1, + xmlNodeSetPtr nodes2); +XMLPUBFUN xmlNodeSetPtr + xmlXPathIntersection (xmlNodeSetPtr nodes1, + xmlNodeSetPtr nodes2); + +XMLPUBFUN xmlNodeSetPtr + xmlXPathDistinctSorted (xmlNodeSetPtr nodes); +XMLPUBFUN xmlNodeSetPtr + xmlXPathDistinct (xmlNodeSetPtr nodes); + +XMLPUBFUN int + xmlXPathHasSameNodes (xmlNodeSetPtr nodes1, + xmlNodeSetPtr nodes2); + +XMLPUBFUN xmlNodeSetPtr + xmlXPathNodeLeadingSorted (xmlNodeSetPtr nodes, + xmlNodePtr node); +XMLPUBFUN xmlNodeSetPtr + xmlXPathLeadingSorted (xmlNodeSetPtr nodes1, + xmlNodeSetPtr nodes2); +XMLPUBFUN xmlNodeSetPtr + xmlXPathNodeLeading (xmlNodeSetPtr nodes, + xmlNodePtr node); +XMLPUBFUN xmlNodeSetPtr + xmlXPathLeading (xmlNodeSetPtr nodes1, + xmlNodeSetPtr nodes2); + +XMLPUBFUN xmlNodeSetPtr + xmlXPathNodeTrailingSorted (xmlNodeSetPtr nodes, + xmlNodePtr node); +XMLPUBFUN xmlNodeSetPtr + xmlXPathTrailingSorted (xmlNodeSetPtr nodes1, + xmlNodeSetPtr nodes2); +XMLPUBFUN xmlNodeSetPtr + xmlXPathNodeTrailing (xmlNodeSetPtr nodes, + xmlNodePtr node); +XMLPUBFUN xmlNodeSetPtr + xmlXPathTrailing (xmlNodeSetPtr nodes1, + xmlNodeSetPtr nodes2); + + +/** + * Extending a context. + */ + +XMLPUBFUN int + xmlXPathRegisterNs (xmlXPathContextPtr ctxt, + const xmlChar *prefix, + const xmlChar *ns_uri); +XMLPUBFUN const xmlChar * + xmlXPathNsLookup (xmlXPathContextPtr ctxt, + const xmlChar *prefix); +XMLPUBFUN void + xmlXPathRegisteredNsCleanup (xmlXPathContextPtr ctxt); + +XMLPUBFUN int + xmlXPathRegisterFunc (xmlXPathContextPtr ctxt, + const xmlChar *name, + xmlXPathFunction f); +XMLPUBFUN int + xmlXPathRegisterFuncNS (xmlXPathContextPtr ctxt, + const xmlChar *name, + const xmlChar *ns_uri, + xmlXPathFunction f); +XMLPUBFUN int + xmlXPathRegisterVariable (xmlXPathContextPtr ctxt, + const xmlChar *name, + xmlXPathObjectPtr value); +XMLPUBFUN int + xmlXPathRegisterVariableNS (xmlXPathContextPtr ctxt, + const xmlChar *name, + const xmlChar *ns_uri, + xmlXPathObjectPtr value); +XMLPUBFUN xmlXPathFunction + xmlXPathFunctionLookup (xmlXPathContextPtr ctxt, + const xmlChar *name); +XMLPUBFUN xmlXPathFunction + xmlXPathFunctionLookupNS (xmlXPathContextPtr ctxt, + const xmlChar *name, + const xmlChar *ns_uri); +XMLPUBFUN void + xmlXPathRegisteredFuncsCleanup (xmlXPathContextPtr ctxt); +XMLPUBFUN xmlXPathObjectPtr + xmlXPathVariableLookup (xmlXPathContextPtr ctxt, + const xmlChar *name); +XMLPUBFUN xmlXPathObjectPtr + xmlXPathVariableLookupNS (xmlXPathContextPtr ctxt, + const xmlChar *name, + const xmlChar *ns_uri); +XMLPUBFUN void + xmlXPathRegisteredVariablesCleanup(xmlXPathContextPtr ctxt); + +/** + * Utilities to extend XPath. + */ +XMLPUBFUN xmlXPathParserContextPtr + xmlXPathNewParserContext (const xmlChar *str, + xmlXPathContextPtr ctxt); +XMLPUBFUN void + xmlXPathFreeParserContext (xmlXPathParserContextPtr ctxt); + +/* TODO: remap to xmlXPathValuePop and Push. 
*/ +XMLPUBFUN xmlXPathObjectPtr + valuePop (xmlXPathParserContextPtr ctxt); +XMLPUBFUN int + valuePush (xmlXPathParserContextPtr ctxt, + xmlXPathObjectPtr value); + +XMLPUBFUN xmlXPathObjectPtr + xmlXPathNewString (const xmlChar *val); +XMLPUBFUN xmlXPathObjectPtr + xmlXPathNewCString (const char *val); +XMLPUBFUN xmlXPathObjectPtr + xmlXPathWrapString (xmlChar *val); +XMLPUBFUN xmlXPathObjectPtr + xmlXPathWrapCString (char * val); +XMLPUBFUN xmlXPathObjectPtr + xmlXPathNewFloat (double val); +XMLPUBFUN xmlXPathObjectPtr + xmlXPathNewBoolean (int val); +XMLPUBFUN xmlXPathObjectPtr + xmlXPathNewNodeSet (xmlNodePtr val); +XMLPUBFUN xmlXPathObjectPtr + xmlXPathNewValueTree (xmlNodePtr val); +XMLPUBFUN int + xmlXPathNodeSetAdd (xmlNodeSetPtr cur, + xmlNodePtr val); +XMLPUBFUN int + xmlXPathNodeSetAddUnique (xmlNodeSetPtr cur, + xmlNodePtr val); +XMLPUBFUN int + xmlXPathNodeSetAddNs (xmlNodeSetPtr cur, + xmlNodePtr node, + xmlNsPtr ns); +XMLPUBFUN void + xmlXPathNodeSetSort (xmlNodeSetPtr set); + +XMLPUBFUN void + xmlXPathRoot (xmlXPathParserContextPtr ctxt); +XMLPUBFUN void + xmlXPathEvalExpr (xmlXPathParserContextPtr ctxt); +XMLPUBFUN xmlChar * + xmlXPathParseName (xmlXPathParserContextPtr ctxt); +XMLPUBFUN xmlChar * + xmlXPathParseNCName (xmlXPathParserContextPtr ctxt); + +/* + * Existing functions. + */ +XMLPUBFUN double + xmlXPathStringEvalNumber (const xmlChar *str); +XMLPUBFUN int + xmlXPathEvaluatePredicateResult (xmlXPathParserContextPtr ctxt, + xmlXPathObjectPtr res); +XMLPUBFUN void + xmlXPathRegisterAllFunctions (xmlXPathContextPtr ctxt); +XMLPUBFUN xmlNodeSetPtr + xmlXPathNodeSetMerge (xmlNodeSetPtr val1, + xmlNodeSetPtr val2); +XMLPUBFUN void + xmlXPathNodeSetDel (xmlNodeSetPtr cur, + xmlNodePtr val); +XMLPUBFUN void + xmlXPathNodeSetRemove (xmlNodeSetPtr cur, + int val); +XMLPUBFUN xmlXPathObjectPtr + xmlXPathNewNodeSetList (xmlNodeSetPtr val); +XMLPUBFUN xmlXPathObjectPtr + xmlXPathWrapNodeSet (xmlNodeSetPtr val); +XMLPUBFUN xmlXPathObjectPtr + xmlXPathWrapExternal (void *val); + +XMLPUBFUN int xmlXPathEqualValues(xmlXPathParserContextPtr ctxt); +XMLPUBFUN int xmlXPathNotEqualValues(xmlXPathParserContextPtr ctxt); +XMLPUBFUN int xmlXPathCompareValues(xmlXPathParserContextPtr ctxt, int inf, int strict); +XMLPUBFUN void xmlXPathValueFlipSign(xmlXPathParserContextPtr ctxt); +XMLPUBFUN void xmlXPathAddValues(xmlXPathParserContextPtr ctxt); +XMLPUBFUN void xmlXPathSubValues(xmlXPathParserContextPtr ctxt); +XMLPUBFUN void xmlXPathMultValues(xmlXPathParserContextPtr ctxt); +XMLPUBFUN void xmlXPathDivValues(xmlXPathParserContextPtr ctxt); +XMLPUBFUN void xmlXPathModValues(xmlXPathParserContextPtr ctxt); + +XMLPUBFUN int xmlXPathIsNodeType(const xmlChar *name); + +/* + * Some of the axis navigation routines. 
+ */ +XMLPUBFUN xmlNodePtr xmlXPathNextSelf(xmlXPathParserContextPtr ctxt, + xmlNodePtr cur); +XMLPUBFUN xmlNodePtr xmlXPathNextChild(xmlXPathParserContextPtr ctxt, + xmlNodePtr cur); +XMLPUBFUN xmlNodePtr xmlXPathNextDescendant(xmlXPathParserContextPtr ctxt, + xmlNodePtr cur); +XMLPUBFUN xmlNodePtr xmlXPathNextDescendantOrSelf(xmlXPathParserContextPtr ctxt, + xmlNodePtr cur); +XMLPUBFUN xmlNodePtr xmlXPathNextParent(xmlXPathParserContextPtr ctxt, + xmlNodePtr cur); +XMLPUBFUN xmlNodePtr xmlXPathNextAncestorOrSelf(xmlXPathParserContextPtr ctxt, + xmlNodePtr cur); +XMLPUBFUN xmlNodePtr xmlXPathNextFollowingSibling(xmlXPathParserContextPtr ctxt, + xmlNodePtr cur); +XMLPUBFUN xmlNodePtr xmlXPathNextFollowing(xmlXPathParserContextPtr ctxt, + xmlNodePtr cur); +XMLPUBFUN xmlNodePtr xmlXPathNextNamespace(xmlXPathParserContextPtr ctxt, + xmlNodePtr cur); +XMLPUBFUN xmlNodePtr xmlXPathNextAttribute(xmlXPathParserContextPtr ctxt, + xmlNodePtr cur); +XMLPUBFUN xmlNodePtr xmlXPathNextPreceding(xmlXPathParserContextPtr ctxt, + xmlNodePtr cur); +XMLPUBFUN xmlNodePtr xmlXPathNextAncestor(xmlXPathParserContextPtr ctxt, + xmlNodePtr cur); +XMLPUBFUN xmlNodePtr xmlXPathNextPrecedingSibling(xmlXPathParserContextPtr ctxt, + xmlNodePtr cur); +/* + * The official core of XPath functions. + */ +XMLPUBFUN void xmlXPathLastFunction(xmlXPathParserContextPtr ctxt, int nargs); +XMLPUBFUN void xmlXPathPositionFunction(xmlXPathParserContextPtr ctxt, int nargs); +XMLPUBFUN void xmlXPathCountFunction(xmlXPathParserContextPtr ctxt, int nargs); +XMLPUBFUN void xmlXPathIdFunction(xmlXPathParserContextPtr ctxt, int nargs); +XMLPUBFUN void xmlXPathLocalNameFunction(xmlXPathParserContextPtr ctxt, int nargs); +XMLPUBFUN void xmlXPathNamespaceURIFunction(xmlXPathParserContextPtr ctxt, int nargs); +XMLPUBFUN void xmlXPathStringFunction(xmlXPathParserContextPtr ctxt, int nargs); +XMLPUBFUN void xmlXPathStringLengthFunction(xmlXPathParserContextPtr ctxt, int nargs); +XMLPUBFUN void xmlXPathConcatFunction(xmlXPathParserContextPtr ctxt, int nargs); +XMLPUBFUN void xmlXPathContainsFunction(xmlXPathParserContextPtr ctxt, int nargs); +XMLPUBFUN void xmlXPathStartsWithFunction(xmlXPathParserContextPtr ctxt, int nargs); +XMLPUBFUN void xmlXPathSubstringFunction(xmlXPathParserContextPtr ctxt, int nargs); +XMLPUBFUN void xmlXPathSubstringBeforeFunction(xmlXPathParserContextPtr ctxt, int nargs); +XMLPUBFUN void xmlXPathSubstringAfterFunction(xmlXPathParserContextPtr ctxt, int nargs); +XMLPUBFUN void xmlXPathNormalizeFunction(xmlXPathParserContextPtr ctxt, int nargs); +XMLPUBFUN void xmlXPathTranslateFunction(xmlXPathParserContextPtr ctxt, int nargs); +XMLPUBFUN void xmlXPathNotFunction(xmlXPathParserContextPtr ctxt, int nargs); +XMLPUBFUN void xmlXPathTrueFunction(xmlXPathParserContextPtr ctxt, int nargs); +XMLPUBFUN void xmlXPathFalseFunction(xmlXPathParserContextPtr ctxt, int nargs); +XMLPUBFUN void xmlXPathLangFunction(xmlXPathParserContextPtr ctxt, int nargs); +XMLPUBFUN void xmlXPathNumberFunction(xmlXPathParserContextPtr ctxt, int nargs); +XMLPUBFUN void xmlXPathSumFunction(xmlXPathParserContextPtr ctxt, int nargs); +XMLPUBFUN void xmlXPathFloorFunction(xmlXPathParserContextPtr ctxt, int nargs); +XMLPUBFUN void xmlXPathCeilingFunction(xmlXPathParserContextPtr ctxt, int nargs); +XMLPUBFUN void xmlXPathRoundFunction(xmlXPathParserContextPtr ctxt, int nargs); +XMLPUBFUN void xmlXPathBooleanFunction(xmlXPathParserContextPtr ctxt, int nargs); + +/** + * Really internal functions + */ +XMLPUBFUN void xmlXPathNodeSetFreeNs(xmlNsPtr ns); + 
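Editor's note: the value-stack macros (CHECK_ARITY, CAST_TO_NUMBER, CHECK_TYPE, xmlXPathReturnNumber) and the registration entry points declared above are normally combined when exposing a custom XPath extension function. The sketch below is illustrative only and is not part of the vendored header; the names myTwiceFunction, xpathCtxt and the XPath function name "twice" are hypothetical, while xmlXPathPopNumber() is libxml2's standard helper for popping a numeric argument.

    static void
    myTwiceFunction(xmlXPathParserContextPtr ctxt, int nargs) {
        double val;

        CHECK_ARITY(1);                 /* exactly one argument expected */
        CAST_TO_NUMBER;                 /* coerce the top of the stack */
        CHECK_TYPE(XPATH_NUMBER);       /* defensive type check */
        val = xmlXPathPopNumber(ctxt);  /* pop the numeric argument */
        xmlXPathReturnNumber(ctxt, 2.0 * val);
    }

    /* registration against an existing xmlXPathContextPtr: */
    xmlXPathRegisterFunc(xpathCtxt, BAD_CAST "twice", myTwiceFunction);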
+#ifdef __cplusplus +} +#endif + +#endif /* LIBXML_XPATH_ENABLED */ +#endif /* ! __XML_XPATH_INTERNALS_H__ */ diff --git a/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/include/libxml2/libxml/xpointer.h b/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/include/libxml2/libxml/xpointer.h new file mode 100644 index 000000000..a5260008f --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/include/libxml2/libxml/xpointer.h @@ -0,0 +1,138 @@ +/* + * Summary: API to handle XML Pointers + * Description: API to handle XML Pointers + * Base implementation was made accordingly to + * W3C Candidate Recommendation 7 June 2000 + * http://www.w3.org/TR/2000/CR-xptr-20000607 + * + * Added support for the element() scheme described in: + * W3C Proposed Recommendation 13 November 2002 + * http://www.w3.org/TR/2002/PR-xptr-element-20021113/ + * + * Copy: See Copyright for the status of this software. + * + * Author: Daniel Veillard + */ + +#ifndef __XML_XPTR_H__ +#define __XML_XPTR_H__ + +#include + +#ifdef LIBXML_XPTR_ENABLED + +#include +#include + +#ifdef __cplusplus +extern "C" { +#endif + +#if defined(LIBXML_XPTR_LOCS_ENABLED) +/* + * A Location Set + */ +typedef struct _xmlLocationSet xmlLocationSet; +typedef xmlLocationSet *xmlLocationSetPtr; +struct _xmlLocationSet { + int locNr; /* number of locations in the set */ + int locMax; /* size of the array as allocated */ + xmlXPathObjectPtr *locTab;/* array of locations */ +}; + +/* + * Handling of location sets. + */ + +XML_DEPRECATED +XMLPUBFUN xmlLocationSetPtr + xmlXPtrLocationSetCreate (xmlXPathObjectPtr val); +XML_DEPRECATED +XMLPUBFUN void + xmlXPtrFreeLocationSet (xmlLocationSetPtr obj); +XML_DEPRECATED +XMLPUBFUN xmlLocationSetPtr + xmlXPtrLocationSetMerge (xmlLocationSetPtr val1, + xmlLocationSetPtr val2); +XML_DEPRECATED +XMLPUBFUN xmlXPathObjectPtr + xmlXPtrNewRange (xmlNodePtr start, + int startindex, + xmlNodePtr end, + int endindex); +XML_DEPRECATED +XMLPUBFUN xmlXPathObjectPtr + xmlXPtrNewRangePoints (xmlXPathObjectPtr start, + xmlXPathObjectPtr end); +XML_DEPRECATED +XMLPUBFUN xmlXPathObjectPtr + xmlXPtrNewRangeNodePoint (xmlNodePtr start, + xmlXPathObjectPtr end); +XML_DEPRECATED +XMLPUBFUN xmlXPathObjectPtr + xmlXPtrNewRangePointNode (xmlXPathObjectPtr start, + xmlNodePtr end); +XML_DEPRECATED +XMLPUBFUN xmlXPathObjectPtr + xmlXPtrNewRangeNodes (xmlNodePtr start, + xmlNodePtr end); +XML_DEPRECATED +XMLPUBFUN xmlXPathObjectPtr + xmlXPtrNewLocationSetNodes (xmlNodePtr start, + xmlNodePtr end); +XML_DEPRECATED +XMLPUBFUN xmlXPathObjectPtr + xmlXPtrNewLocationSetNodeSet(xmlNodeSetPtr set); +XML_DEPRECATED +XMLPUBFUN xmlXPathObjectPtr + xmlXPtrNewRangeNodeObject (xmlNodePtr start, + xmlXPathObjectPtr end); +XML_DEPRECATED +XMLPUBFUN xmlXPathObjectPtr + xmlXPtrNewCollapsedRange (xmlNodePtr start); +XML_DEPRECATED +XMLPUBFUN void + xmlXPtrLocationSetAdd (xmlLocationSetPtr cur, + xmlXPathObjectPtr val); +XML_DEPRECATED +XMLPUBFUN xmlXPathObjectPtr + xmlXPtrWrapLocationSet (xmlLocationSetPtr val); +XML_DEPRECATED +XMLPUBFUN void + xmlXPtrLocationSetDel (xmlLocationSetPtr cur, + xmlXPathObjectPtr val); +XML_DEPRECATED +XMLPUBFUN void + xmlXPtrLocationSetRemove (xmlLocationSetPtr cur, + int val); +#endif /* defined(LIBXML_XPTR_LOCS_ENABLED) */ + +/* + * Functions. 
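 *
 * Editor's note (illustrative sketch, not part of the upstream header):
 * a typical XPointer evaluation with the two entry points declared
 * below, where doc is an already-parsed xmlDocPtr:
 *
 *     xmlXPathContextPtr xptrctx = xmlXPtrNewContext(doc, NULL, NULL);
 *     xmlXPathObjectPtr obj = xmlXPtrEval(BAD_CAST "xpointer(//chapter[1])", xptrctx);
 *
 * Both obj and xptrctx are eventually released with xmlXPathFreeObject()
 * and xmlXPathFreeContext().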
+ */ +XMLPUBFUN xmlXPathContextPtr + xmlXPtrNewContext (xmlDocPtr doc, + xmlNodePtr here, + xmlNodePtr origin); +XMLPUBFUN xmlXPathObjectPtr + xmlXPtrEval (const xmlChar *str, + xmlXPathContextPtr ctx); + +#if defined(LIBXML_XPTR_LOCS_ENABLED) +XML_DEPRECATED +XMLPUBFUN void + xmlXPtrRangeToFunction (xmlXPathParserContextPtr ctxt, + int nargs); +XML_DEPRECATED +XMLPUBFUN xmlNodePtr + xmlXPtrBuildNodeList (xmlXPathObjectPtr obj); +XML_DEPRECATED +XMLPUBFUN void + xmlXPtrEvalRangePredicate (xmlXPathParserContextPtr ctxt); +#endif /* defined(LIBXML_XPTR_LOCS_ENABLED) */ +#ifdef __cplusplus +} +#endif + +#endif /* LIBXML_XPTR_ENABLED */ +#endif /* __XML_XPTR_H__ */ diff --git a/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/include/libxslt/attributes.h b/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/include/libxslt/attributes.h new file mode 100644 index 000000000..d9b99a74a --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/include/libxslt/attributes.h @@ -0,0 +1,39 @@ +/* + * Summary: interface for the XSLT attribute handling + * Description: this module handles the specificities of attribute + * and attribute groups processing. + * + * Copy: See Copyright for the status of this software. + * + * Author: Daniel Veillard + */ + +#ifndef __XML_XSLT_ATTRIBUTES_H__ +#define __XML_XSLT_ATTRIBUTES_H__ + +#include +#include "xsltexports.h" +#include "xsltInternals.h" + +#ifdef __cplusplus +extern "C" { +#endif + +XSLTPUBFUN void XSLTCALL + xsltParseStylesheetAttributeSet (xsltStylesheetPtr style, + xmlNodePtr cur); +XSLTPUBFUN void XSLTCALL + xsltFreeAttributeSetsHashes (xsltStylesheetPtr style); +XSLTPUBFUN void XSLTCALL + xsltApplyAttributeSet (xsltTransformContextPtr ctxt, + xmlNodePtr node, + xmlNodePtr inst, + const xmlChar *attributes); +XSLTPUBFUN void XSLTCALL + xsltResolveStylesheetAttributeSet(xsltStylesheetPtr style); +#ifdef __cplusplus +} +#endif + +#endif /* __XML_XSLT_ATTRIBUTES_H__ */ + diff --git a/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/include/libxslt/documents.h b/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/include/libxslt/documents.h new file mode 100644 index 000000000..ae7c0ca24 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/include/libxslt/documents.h @@ -0,0 +1,93 @@ +/* + * Summary: interface for the document handling + * Description: implements document loading and cache (multiple + * document() reference for the same resources must + * be equal. + * + * Copy: See Copyright for the status of this software. 
+ * + * Author: Daniel Veillard + */ + +#ifndef __XML_XSLT_DOCUMENTS_H__ +#define __XML_XSLT_DOCUMENTS_H__ + +#include +#include "xsltexports.h" +#include "xsltInternals.h" + +#ifdef __cplusplus +extern "C" { +#endif + +XSLTPUBFUN xsltDocumentPtr XSLTCALL + xsltNewDocument (xsltTransformContextPtr ctxt, + xmlDocPtr doc); +XSLTPUBFUN xsltDocumentPtr XSLTCALL + xsltLoadDocument (xsltTransformContextPtr ctxt, + const xmlChar *URI); +XSLTPUBFUN xsltDocumentPtr XSLTCALL + xsltFindDocument (xsltTransformContextPtr ctxt, + xmlDocPtr doc); +XSLTPUBFUN void XSLTCALL + xsltFreeDocuments (xsltTransformContextPtr ctxt); + +XSLTPUBFUN xsltDocumentPtr XSLTCALL + xsltLoadStyleDocument (xsltStylesheetPtr style, + const xmlChar *URI); +XSLTPUBFUN xsltDocumentPtr XSLTCALL + xsltNewStyleDocument (xsltStylesheetPtr style, + xmlDocPtr doc); +XSLTPUBFUN void XSLTCALL + xsltFreeStyleDocuments (xsltStylesheetPtr style); + +/* + * Hooks for document loading + */ + +/** + * xsltLoadType: + * + * Enum defining the kind of loader requirement. + */ +typedef enum { + XSLT_LOAD_START = 0, /* loading for a top stylesheet */ + XSLT_LOAD_STYLESHEET = 1, /* loading for a stylesheet include/import */ + XSLT_LOAD_DOCUMENT = 2 /* loading document at transformation time */ +} xsltLoadType; + +/** + * xsltDocLoaderFunc: + * @URI: the URI of the document to load + * @dict: the dictionary to use when parsing that document + * @options: parsing options, a set of xmlParserOption + * @ctxt: the context, either a stylesheet or a transformation context + * @type: the xsltLoadType indicating the kind of loading required + * + * An xsltDocLoaderFunc is a signature for a function which can be + * registered to load document not provided by the compilation or + * transformation API themselve, for example when an xsl:import, + * xsl:include is found at compilation time or when a document() + * call is made at runtime. + * + * Returns the pointer to the document (which will be modified and + * freed by the engine later), or NULL in case of error. + */ +typedef xmlDocPtr (*xsltDocLoaderFunc) (const xmlChar *URI, + xmlDictPtr dict, + int options, + void *ctxt, + xsltLoadType type); + +XSLTPUBFUN void XSLTCALL + xsltSetLoaderFunc (xsltDocLoaderFunc f); + +/* the loader may be needed by extension libraries so it is exported */ +XSLTPUBVAR xsltDocLoaderFunc xsltDocDefaultLoader; + +#ifdef __cplusplus +} +#endif + +#endif /* __XML_XSLT_DOCUMENTS_H__ */ + diff --git a/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/include/libxslt/extensions.h b/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/include/libxslt/extensions.h new file mode 100644 index 000000000..84d6aa44a --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/include/libxslt/extensions.h @@ -0,0 +1,262 @@ +/* + * Summary: interface for the extension support + * Description: This provide the API needed for simple and module + * extension support. + * + * Copy: See Copyright for the status of this software. + * + * Author: Daniel Veillard + */ + +#ifndef __XML_XSLT_EXTENSION_H__ +#define __XML_XSLT_EXTENSION_H__ + +#include +#include "xsltexports.h" +#include "xsltInternals.h" + +#ifdef __cplusplus +extern "C" { +#endif + +/** + * Extension Modules API. 
+ */ + +/** + * xsltInitGlobals: + * + * Initialize the global variables for extensions + * + */ + +XSLTPUBFUN void XSLTCALL + xsltInitGlobals (void); + +/** + * xsltStyleExtInitFunction: + * @ctxt: an XSLT stylesheet + * @URI: the namespace URI for the extension + * + * A function called at initialization time of an XSLT extension module. + * + * Returns a pointer to the module specific data for this transformation. + */ +typedef void * (*xsltStyleExtInitFunction) (xsltStylesheetPtr style, + const xmlChar *URI); + +/** + * xsltStyleExtShutdownFunction: + * @ctxt: an XSLT stylesheet + * @URI: the namespace URI for the extension + * @data: the data associated to this module + * + * A function called at shutdown time of an XSLT extension module. + */ +typedef void (*xsltStyleExtShutdownFunction) (xsltStylesheetPtr style, + const xmlChar *URI, + void *data); + +/** + * xsltExtInitFunction: + * @ctxt: an XSLT transformation context + * @URI: the namespace URI for the extension + * + * A function called at initialization time of an XSLT extension module. + * + * Returns a pointer to the module specific data for this transformation. + */ +typedef void * (*xsltExtInitFunction) (xsltTransformContextPtr ctxt, + const xmlChar *URI); + +/** + * xsltExtShutdownFunction: + * @ctxt: an XSLT transformation context + * @URI: the namespace URI for the extension + * @data: the data associated to this module + * + * A function called at shutdown time of an XSLT extension module. + */ +typedef void (*xsltExtShutdownFunction) (xsltTransformContextPtr ctxt, + const xmlChar *URI, + void *data); + +XSLTPUBFUN int XSLTCALL + xsltRegisterExtModule (const xmlChar *URI, + xsltExtInitFunction initFunc, + xsltExtShutdownFunction shutdownFunc); +XSLTPUBFUN int XSLTCALL + xsltRegisterExtModuleFull + (const xmlChar * URI, + xsltExtInitFunction initFunc, + xsltExtShutdownFunction shutdownFunc, + xsltStyleExtInitFunction styleInitFunc, + xsltStyleExtShutdownFunction styleShutdownFunc); + +XSLTPUBFUN int XSLTCALL + xsltUnregisterExtModule (const xmlChar * URI); + +XSLTPUBFUN void * XSLTCALL + xsltGetExtData (xsltTransformContextPtr ctxt, + const xmlChar *URI); + +XSLTPUBFUN void * XSLTCALL + xsltStyleGetExtData (xsltStylesheetPtr style, + const xmlChar *URI); +#ifdef XSLT_REFACTORED +XSLTPUBFUN void * XSLTCALL + xsltStyleStylesheetLevelGetExtData( + xsltStylesheetPtr style, + const xmlChar * URI); +#endif +XSLTPUBFUN void XSLTCALL + xsltShutdownCtxtExts (xsltTransformContextPtr ctxt); + +XSLTPUBFUN void XSLTCALL + xsltShutdownExts (xsltStylesheetPtr style); + +XSLTPUBFUN xsltTransformContextPtr XSLTCALL + xsltXPathGetTransformContext + (xmlXPathParserContextPtr ctxt); + +/* + * extension functions +*/ +XSLTPUBFUN int XSLTCALL + xsltRegisterExtModuleFunction + (const xmlChar *name, + const xmlChar *URI, + xmlXPathFunction function); +XSLTPUBFUN xmlXPathFunction XSLTCALL + xsltExtModuleFunctionLookup (const xmlChar *name, + const xmlChar *URI); +XSLTPUBFUN int XSLTCALL + xsltUnregisterExtModuleFunction + (const xmlChar *name, + const xmlChar *URI); + +/* + * extension elements + */ +typedef xsltElemPreCompPtr (*xsltPreComputeFunction) + (xsltStylesheetPtr style, + xmlNodePtr inst, + xsltTransformFunction function); + +XSLTPUBFUN xsltElemPreCompPtr XSLTCALL + xsltNewElemPreComp (xsltStylesheetPtr style, + xmlNodePtr inst, + xsltTransformFunction function); +XSLTPUBFUN void XSLTCALL + xsltInitElemPreComp (xsltElemPreCompPtr comp, + xsltStylesheetPtr style, + xmlNodePtr inst, + xsltTransformFunction function, + 
xsltElemPreCompDeallocator freeFunc); + +XSLTPUBFUN int XSLTCALL + xsltRegisterExtModuleElement + (const xmlChar *name, + const xmlChar *URI, + xsltPreComputeFunction precomp, + xsltTransformFunction transform); +XSLTPUBFUN xsltTransformFunction XSLTCALL + xsltExtElementLookup (xsltTransformContextPtr ctxt, + const xmlChar *name, + const xmlChar *URI); +XSLTPUBFUN xsltTransformFunction XSLTCALL + xsltExtModuleElementLookup + (const xmlChar *name, + const xmlChar *URI); +XSLTPUBFUN xsltPreComputeFunction XSLTCALL + xsltExtModuleElementPreComputeLookup + (const xmlChar *name, + const xmlChar *URI); +XSLTPUBFUN int XSLTCALL + xsltUnregisterExtModuleElement + (const xmlChar *name, + const xmlChar *URI); + +/* + * top-level elements + */ +typedef void (*xsltTopLevelFunction) (xsltStylesheetPtr style, + xmlNodePtr inst); + +XSLTPUBFUN int XSLTCALL + xsltRegisterExtModuleTopLevel + (const xmlChar *name, + const xmlChar *URI, + xsltTopLevelFunction function); +XSLTPUBFUN xsltTopLevelFunction XSLTCALL + xsltExtModuleTopLevelLookup + (const xmlChar *name, + const xmlChar *URI); +XSLTPUBFUN int XSLTCALL + xsltUnregisterExtModuleTopLevel + (const xmlChar *name, + const xmlChar *URI); + + +/* These 2 functions are deprecated for use within modules. */ +XSLTPUBFUN int XSLTCALL + xsltRegisterExtFunction (xsltTransformContextPtr ctxt, + const xmlChar *name, + const xmlChar *URI, + xmlXPathFunction function); +XSLTPUBFUN int XSLTCALL + xsltRegisterExtElement (xsltTransformContextPtr ctxt, + const xmlChar *name, + const xmlChar *URI, + xsltTransformFunction function); + +/* + * Extension Prefix handling API. + * Those are used by the XSLT (pre)processor. + */ + +XSLTPUBFUN int XSLTCALL + xsltRegisterExtPrefix (xsltStylesheetPtr style, + const xmlChar *prefix, + const xmlChar *URI); +XSLTPUBFUN int XSLTCALL + xsltCheckExtPrefix (xsltStylesheetPtr style, + const xmlChar *URI); +XSLTPUBFUN int XSLTCALL + xsltCheckExtURI (xsltStylesheetPtr style, + const xmlChar *URI); +XSLTPUBFUN int XSLTCALL + xsltInitCtxtExts (xsltTransformContextPtr ctxt); +XSLTPUBFUN void XSLTCALL + xsltFreeCtxtExts (xsltTransformContextPtr ctxt); +XSLTPUBFUN void XSLTCALL + xsltFreeExts (xsltStylesheetPtr style); + +XSLTPUBFUN xsltElemPreCompPtr XSLTCALL + xsltPreComputeExtModuleElement + (xsltStylesheetPtr style, + xmlNodePtr inst); +/* + * Extension Infos access. + * Used by exslt initialisation + */ + +XSLTPUBFUN xmlHashTablePtr XSLTCALL + xsltGetExtInfo (xsltStylesheetPtr style, + const xmlChar *URI); + +/** + * Test of the extension module API + */ +XSLTPUBFUN void XSLTCALL + xsltRegisterTestModule (void); +XSLTPUBFUN void XSLTCALL + xsltDebugDumpExtensions (FILE * output); + + +#ifdef __cplusplus +} +#endif + +#endif /* __XML_XSLT_EXTENSION_H__ */ + diff --git a/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/include/libxslt/extra.h b/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/include/libxslt/extra.h new file mode 100644 index 000000000..e512fd03d --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/include/libxslt/extra.h @@ -0,0 +1,72 @@ +/* + * Summary: interface for the non-standard features + * Description: implement some extension outside the XSLT namespace + * but not EXSLT with is in a different library. + * + * Copy: See Copyright for the status of this software. 
+ * + * Author: Daniel Veillard + */ + +#ifndef __XML_XSLT_EXTRA_H__ +#define __XML_XSLT_EXTRA_H__ + +#include +#include "xsltexports.h" +#include "xsltInternals.h" + +#ifdef __cplusplus +extern "C" { +#endif + +/** + * XSLT_LIBXSLT_NAMESPACE: + * + * This is the libxslt namespace for specific extensions. + */ +#define XSLT_LIBXSLT_NAMESPACE ((xmlChar *) "http://xmlsoft.org/XSLT/namespace") + +/** + * XSLT_SAXON_NAMESPACE: + * + * This is Michael Kay's Saxon processor namespace for extensions. + */ +#define XSLT_SAXON_NAMESPACE ((xmlChar *) "http://icl.com/saxon") + +/** + * XSLT_XT_NAMESPACE: + * + * This is James Clark's XT processor namespace for extensions. + */ +#define XSLT_XT_NAMESPACE ((xmlChar *) "http://www.jclark.com/xt") + +/** + * XSLT_XALAN_NAMESPACE: + * + * This is the Apache project XALAN processor namespace for extensions. + */ +#define XSLT_XALAN_NAMESPACE ((xmlChar *) \ + "org.apache.xalan.xslt.extensions.Redirect") + + +XSLTPUBFUN void XSLTCALL + xsltFunctionNodeSet (xmlXPathParserContextPtr ctxt, + int nargs); +XSLTPUBFUN void XSLTCALL + xsltDebug (xsltTransformContextPtr ctxt, + xmlNodePtr node, + xmlNodePtr inst, + xsltElemPreCompPtr comp); + + +XSLTPUBFUN void XSLTCALL + xsltRegisterExtras (xsltTransformContextPtr ctxt); +XSLTPUBFUN void XSLTCALL + xsltRegisterAllExtras (void); + +#ifdef __cplusplus +} +#endif + +#endif /* __XML_XSLT_EXTRA_H__ */ + diff --git a/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/include/libxslt/functions.h b/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/include/libxslt/functions.h new file mode 100644 index 000000000..5455b7f47 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/include/libxslt/functions.h @@ -0,0 +1,78 @@ +/* + * Summary: interface for the XSLT functions not from XPath + * Description: a set of extra functions coming from XSLT but not in XPath + * + * Copy: See Copyright for the status of this software. + * + * Author: Daniel Veillard and Bjorn Reese + */ + +#ifndef __XML_XSLT_FUNCTIONS_H__ +#define __XML_XSLT_FUNCTIONS_H__ + +#include +#include +#include "xsltexports.h" +#include "xsltInternals.h" + +#ifdef __cplusplus +extern "C" { +#endif + +/** + * XSLT_REGISTER_FUNCTION_LOOKUP: + * + * Registering macro, not general purpose at all but used in different modules. + */ +#define XSLT_REGISTER_FUNCTION_LOOKUP(ctxt) \ + xmlXPathRegisterFuncLookup((ctxt)->xpathCtxt, \ + xsltXPathFunctionLookup, \ + (void *)(ctxt->xpathCtxt)); + +XSLTPUBFUN xmlXPathFunction XSLTCALL + xsltXPathFunctionLookup (void *vctxt, + const xmlChar *name, + const xmlChar *ns_uri); + +/* + * Interfaces for the functions implementations. 
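 *
 * Editor's note (illustrative, not in the upstream header): the
 * XSLT_REGISTER_FUNCTION_LOOKUP macro above registers
 * xsltXPathFunctionLookup() as the XPath function-lookup hook.  For a
 * transform context tctxt (hypothetical name) whose xpathCtxt has
 * already been created:
 *
 *     XSLT_REGISTER_FUNCTION_LOOKUP(tctxt);
 *
 * after which document(), key(), format-number() and the other calls
 * declared below resolve through xsltXPathFunctionLookup().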
+ */ + +XSLTPUBFUN void XSLTCALL + xsltDocumentFunction (xmlXPathParserContextPtr ctxt, + int nargs); +XSLTPUBFUN void XSLTCALL + xsltKeyFunction (xmlXPathParserContextPtr ctxt, + int nargs); +XSLTPUBFUN void XSLTCALL + xsltUnparsedEntityURIFunction (xmlXPathParserContextPtr ctxt, + int nargs); +XSLTPUBFUN void XSLTCALL + xsltFormatNumberFunction (xmlXPathParserContextPtr ctxt, + int nargs); +XSLTPUBFUN void XSLTCALL + xsltGenerateIdFunction (xmlXPathParserContextPtr ctxt, + int nargs); +XSLTPUBFUN void XSLTCALL + xsltSystemPropertyFunction (xmlXPathParserContextPtr ctxt, + int nargs); +XSLTPUBFUN void XSLTCALL + xsltElementAvailableFunction (xmlXPathParserContextPtr ctxt, + int nargs); +XSLTPUBFUN void XSLTCALL + xsltFunctionAvailableFunction (xmlXPathParserContextPtr ctxt, + int nargs); + +/* + * And the registration + */ + +XSLTPUBFUN void XSLTCALL + xsltRegisterAllFunctions (xmlXPathContextPtr ctxt); + +#ifdef __cplusplus +} +#endif + +#endif /* __XML_XSLT_FUNCTIONS_H__ */ + diff --git a/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/include/libxslt/imports.h b/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/include/libxslt/imports.h new file mode 100644 index 000000000..95e44e51d --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/include/libxslt/imports.h @@ -0,0 +1,75 @@ +/* + * Summary: interface for the XSLT import support + * Description: macros and fuctions needed to implement and + * access the import tree + * + * Copy: See Copyright for the status of this software. + * + * Author: Daniel Veillard + */ + +#ifndef __XML_XSLT_IMPORTS_H__ +#define __XML_XSLT_IMPORTS_H__ + +#include +#include "xsltexports.h" +#include "xsltInternals.h" + +#ifdef __cplusplus +extern "C" { +#endif + +/** + * XSLT_GET_IMPORT_PTR: + * + * A macro to import pointers from the stylesheet cascading order. + */ +#define XSLT_GET_IMPORT_PTR(res, style, name) { \ + xsltStylesheetPtr st = style; \ + res = NULL; \ + while (st != NULL) { \ + if (st->name != NULL) { res = st->name; break; } \ + st = xsltNextImport(st); \ + }} + +/** + * XSLT_GET_IMPORT_INT: + * + * A macro to import intergers from the stylesheet cascading order. 
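 *
 * Editor's note (illustrative, not in the upstream header): like
 * XSLT_GET_IMPORT_PTR above, this walks the import tree via
 * xsltNextImport() and stops at the first stylesheet whose field is
 * set (here, != -1).  Assuming the usual int member such as
 * style->indent, a lookup reads:
 *
 *     int indent;
 *     XSLT_GET_IMPORT_INT(indent, style, indent);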
+ */ +#define XSLT_GET_IMPORT_INT(res, style, name) { \ + xsltStylesheetPtr st = style; \ + res = -1; \ + while (st != NULL) { \ + if (st->name != -1) { res = st->name; break; } \ + st = xsltNextImport(st); \ + }} + +/* + * Module interfaces + */ +XSLTPUBFUN int XSLTCALL + xsltParseStylesheetImport(xsltStylesheetPtr style, + xmlNodePtr cur); +XSLTPUBFUN int XSLTCALL + xsltParseStylesheetInclude + (xsltStylesheetPtr style, + xmlNodePtr cur); +XSLTPUBFUN xsltStylesheetPtr XSLTCALL + xsltNextImport (xsltStylesheetPtr style); +XSLTPUBFUN int XSLTCALL + xsltNeedElemSpaceHandling(xsltTransformContextPtr ctxt); +XSLTPUBFUN int XSLTCALL + xsltFindElemSpaceHandling(xsltTransformContextPtr ctxt, + xmlNodePtr node); +XSLTPUBFUN xsltTemplatePtr XSLTCALL + xsltFindTemplate (xsltTransformContextPtr ctxt, + const xmlChar *name, + const xmlChar *nameURI); + +#ifdef __cplusplus +} +#endif + +#endif /* __XML_XSLT_IMPORTS_H__ */ + diff --git a/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/include/libxslt/keys.h b/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/include/libxslt/keys.h new file mode 100644 index 000000000..757d12246 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/include/libxslt/keys.h @@ -0,0 +1,53 @@ +/* + * Summary: interface for the key matching used in key() and template matches. + * Description: implementation of the key mechanims. + * + * Copy: See Copyright for the status of this software. + * + * Author: Daniel Veillard + */ + +#ifndef __XML_XSLT_KEY_H__ +#define __XML_XSLT_KEY_H__ + +#include +#include "xsltexports.h" +#include "xsltInternals.h" + +#ifdef __cplusplus +extern "C" { +#endif + +/** + * NODE_IS_KEYED: + * + * check for bit 15 set + */ +#define NODE_IS_KEYED (1 >> 15) + +XSLTPUBFUN int XSLTCALL + xsltAddKey (xsltStylesheetPtr style, + const xmlChar *name, + const xmlChar *nameURI, + const xmlChar *match, + const xmlChar *use, + xmlNodePtr inst); +XSLTPUBFUN xmlNodeSetPtr XSLTCALL + xsltGetKey (xsltTransformContextPtr ctxt, + const xmlChar *name, + const xmlChar *nameURI, + const xmlChar *value); +XSLTPUBFUN void XSLTCALL + xsltInitCtxtKeys (xsltTransformContextPtr ctxt, + xsltDocumentPtr doc); +XSLTPUBFUN void XSLTCALL + xsltFreeKeys (xsltStylesheetPtr style); +XSLTPUBFUN void XSLTCALL + xsltFreeDocumentKeys (xsltDocumentPtr doc); + +#ifdef __cplusplus +} +#endif + +#endif /* __XML_XSLT_H__ */ + diff --git a/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/include/libxslt/namespaces.h b/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/include/libxslt/namespaces.h new file mode 100644 index 000000000..fa2d3b4ce --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/include/libxslt/namespaces.h @@ -0,0 +1,68 @@ +/* + * Summary: interface for the XSLT namespace handling + * Description: set of function easing the processing and generation + * of namespace nodes in XSLT. + * + * Copy: See Copyright for the status of this software. 
+ * + * Author: Daniel Veillard + */ + +#ifndef __XML_XSLT_NAMESPACES_H__ +#define __XML_XSLT_NAMESPACES_H__ + +#include +#include "xsltexports.h" + +#ifdef __cplusplus +extern "C" { +#endif + +/* + * Used within nsAliases hashtable when the default namespace is required + * but it's not been explicitly defined + */ +/** + * UNDEFINED_DEFAULT_NS: + * + * Special value for undefined namespace, internal + */ +#define UNDEFINED_DEFAULT_NS (const xmlChar *) -1L + +XSLTPUBFUN void XSLTCALL + xsltNamespaceAlias (xsltStylesheetPtr style, + xmlNodePtr node); +XSLTPUBFUN xmlNsPtr XSLTCALL + xsltGetNamespace (xsltTransformContextPtr ctxt, + xmlNodePtr cur, + xmlNsPtr ns, + xmlNodePtr out); +XSLTPUBFUN xmlNsPtr XSLTCALL + xsltGetPlainNamespace (xsltTransformContextPtr ctxt, + xmlNodePtr cur, + xmlNsPtr ns, + xmlNodePtr out); +XSLTPUBFUN xmlNsPtr XSLTCALL + xsltGetSpecialNamespace (xsltTransformContextPtr ctxt, + xmlNodePtr cur, + const xmlChar *URI, + const xmlChar *prefix, + xmlNodePtr out); +XSLTPUBFUN xmlNsPtr XSLTCALL + xsltCopyNamespace (xsltTransformContextPtr ctxt, + xmlNodePtr elem, + xmlNsPtr ns); +XSLTPUBFUN xmlNsPtr XSLTCALL + xsltCopyNamespaceList (xsltTransformContextPtr ctxt, + xmlNodePtr node, + xmlNsPtr cur); +XSLTPUBFUN void XSLTCALL + xsltFreeNamespaceAliasHashes + (xsltStylesheetPtr style); + +#ifdef __cplusplus +} +#endif + +#endif /* __XML_XSLT_NAMESPACES_H__ */ + diff --git a/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/include/libxslt/numbersInternals.h b/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/include/libxslt/numbersInternals.h new file mode 100644 index 000000000..852459281 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/include/libxslt/numbersInternals.h @@ -0,0 +1,73 @@ +/* + * Summary: Implementation of the XSLT number functions + * Description: Implementation of the XSLT number functions + * + * Copy: See Copyright for the status of this software. + * + * Author: Bjorn Reese and Daniel Veillard + */ + +#ifndef __XML_XSLT_NUMBERSINTERNALS_H__ +#define __XML_XSLT_NUMBERSINTERNALS_H__ + +#include +#include "xsltexports.h" + +#ifdef __cplusplus +extern "C" { +#endif + +struct _xsltCompMatch; + +/** + * xsltNumberData: + * + * This data structure is just a wrapper to pass xsl:number data in. + */ +typedef struct _xsltNumberData xsltNumberData; +typedef xsltNumberData *xsltNumberDataPtr; + +struct _xsltNumberData { + const xmlChar *level; + const xmlChar *count; + const xmlChar *from; + const xmlChar *value; + const xmlChar *format; + int has_format; + int digitsPerGroup; + int groupingCharacter; + int groupingCharacterLen; + xmlDocPtr doc; + xmlNodePtr node; + struct _xsltCompMatch *countPat; + struct _xsltCompMatch *fromPat; + + /* + * accelerators + */ +}; + +/** + * xsltFormatNumberInfo,: + * + * This data structure lists the various parameters needed to format numbers. 
+ */ +typedef struct _xsltFormatNumberInfo xsltFormatNumberInfo; +typedef xsltFormatNumberInfo *xsltFormatNumberInfoPtr; + +struct _xsltFormatNumberInfo { + int integer_hash; /* Number of '#' in integer part */ + int integer_digits; /* Number of '0' in integer part */ + int frac_digits; /* Number of '0' in fractional part */ + int frac_hash; /* Number of '#' in fractional part */ + int group; /* Number of chars per display 'group' */ + int multiplier; /* Scaling for percent or permille */ + char add_decimal; /* Flag for whether decimal point appears in pattern */ + char is_multiplier_set; /* Flag to catch multiple occurences of percent/permille */ + char is_negative_pattern;/* Flag for processing -ve prefix/suffix */ +}; + +#ifdef __cplusplus +} +#endif +#endif /* __XML_XSLT_NUMBERSINTERNALS_H__ */ diff --git a/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/include/libxslt/pattern.h b/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/include/libxslt/pattern.h new file mode 100644 index 000000000..a0991c0ce --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/include/libxslt/pattern.h @@ -0,0 +1,84 @@ +/* + * Summary: interface for the pattern matching used in template matches. + * Description: the implementation of the lookup of the right template + * for a given node must be really fast in order to keep + * decent performances. + * + * Copy: See Copyright for the status of this software. + * + * Author: Daniel Veillard + */ + +#ifndef __XML_XSLT_PATTERN_H__ +#define __XML_XSLT_PATTERN_H__ + +#include "xsltInternals.h" +#include "xsltexports.h" + +#ifdef __cplusplus +extern "C" { +#endif + +/** + * xsltCompMatch: + * + * Data structure used for the implementation of patterns. + * It is kept private (in pattern.c). + */ +typedef struct _xsltCompMatch xsltCompMatch; +typedef xsltCompMatch *xsltCompMatchPtr; + +/* + * Pattern related interfaces. + */ + +XSLTPUBFUN xsltCompMatchPtr XSLTCALL + xsltCompilePattern (const xmlChar *pattern, + xmlDocPtr doc, + xmlNodePtr node, + xsltStylesheetPtr style, + xsltTransformContextPtr runtime); +XSLTPUBFUN void XSLTCALL + xsltFreeCompMatchList (xsltCompMatchPtr comp); +XSLTPUBFUN int XSLTCALL + xsltTestCompMatchList (xsltTransformContextPtr ctxt, + xmlNodePtr node, + xsltCompMatchPtr comp); +XSLTPUBFUN void XSLTCALL + xsltCompMatchClearCache (xsltTransformContextPtr ctxt, + xsltCompMatchPtr comp); +XSLTPUBFUN void XSLTCALL + xsltNormalizeCompSteps (void *payload, + void *data, + const xmlChar *name); + +/* + * Template related interfaces. 
+ */ +XSLTPUBFUN int XSLTCALL + xsltAddTemplate (xsltStylesheetPtr style, + xsltTemplatePtr cur, + const xmlChar *mode, + const xmlChar *modeURI); +XSLTPUBFUN xsltTemplatePtr XSLTCALL + xsltGetTemplate (xsltTransformContextPtr ctxt, + xmlNodePtr node, + xsltStylesheetPtr style); +XSLTPUBFUN void XSLTCALL + xsltFreeTemplateHashes (xsltStylesheetPtr style); +XSLTPUBFUN void XSLTCALL + xsltCleanupTemplates (xsltStylesheetPtr style); + +#if 0 +int xsltMatchPattern (xsltTransformContextPtr ctxt, + xmlNodePtr node, + const xmlChar *pattern, + xmlDocPtr ctxtdoc, + xmlNodePtr ctxtnode); +#endif +#ifdef __cplusplus +} +#endif + +#endif /* __XML_XSLT_PATTERN_H__ */ + diff --git a/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/include/libxslt/preproc.h b/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/include/libxslt/preproc.h new file mode 100644 index 000000000..2a2fc7e43 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/include/libxslt/preproc.h @@ -0,0 +1,43 @@ +/* + * Summary: precomputing stylesheets + * Description: this is the compilation phase, where most of the + * stylesheet is "compiled" into faster to use data. + * + * Copy: See Copyright for the status of this software. + * + * Author: Daniel Veillard + */ + +#ifndef __XML_XSLT_PRECOMP_H__ +#define __XML_XSLT_PRECOMP_H__ + +#include +#include "xsltexports.h" +#include "xsltInternals.h" + +#ifdef __cplusplus +extern "C" { +#endif + +/* + * Interfaces + */ +XSLTPUBVAR const xmlChar *xsltExtMarker; + +XSLTPUBFUN xsltElemPreCompPtr XSLTCALL + xsltDocumentComp (xsltStylesheetPtr style, + xmlNodePtr inst, + xsltTransformFunction function); + +XSLTPUBFUN void XSLTCALL + xsltStylePreCompute (xsltStylesheetPtr style, + xmlNodePtr inst); +XSLTPUBFUN void XSLTCALL + xsltFreeStylePreComps (xsltStylesheetPtr style); + +#ifdef __cplusplus +} +#endif + +#endif /* __XML_XSLT_PRECOMP_H__ */ + diff --git a/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/include/libxslt/security.h b/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/include/libxslt/security.h new file mode 100644 index 000000000..bab5c8c6b --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/include/libxslt/security.h @@ -0,0 +1,104 @@ +/* + * Summary: interface for the libxslt security framework + * Description: the libxslt security framework allow to restrict + * the access to new resources (file or URL) from + * the stylesheet at runtime. + * + * Copy: See Copyright for the status of this software. + * + * Author: Daniel Veillard + */ + +#ifndef __XML_XSLT_SECURITY_H__ +#define __XML_XSLT_SECURITY_H__ + +#include +#include "xsltexports.h" +#include "xsltInternals.h" + +#ifdef __cplusplus +extern "C" { +#endif + +/** + * xsltSecurityPref: + * + * structure to indicate the preferences for security in the XSLT + * transformation. + */ +typedef struct _xsltSecurityPrefs xsltSecurityPrefs; +typedef xsltSecurityPrefs *xsltSecurityPrefsPtr; + +/** + * xsltSecurityOption: + * + * the set of option that can be configured + */ +typedef enum { + XSLT_SECPREF_READ_FILE = 1, + XSLT_SECPREF_WRITE_FILE, + XSLT_SECPREF_CREATE_DIRECTORY, + XSLT_SECPREF_READ_NETWORK, + XSLT_SECPREF_WRITE_NETWORK +} xsltSecurityOption; + +/** + * xsltSecurityCheck: + * + * User provided function to check the value of a string like a file + * path or an URL ... 
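 *
 * Editor's note (illustrative, not in the upstream header): the
 * predefined checks xsltSecurityAllow() and xsltSecurityForbid()
 * declared below have exactly this signature, so a transform can be
 * locked down roughly like this, where tctxt is an existing
 * xsltTransformContextPtr:
 *
 *     xsltSecurityPrefsPtr sec = xsltNewSecurityPrefs();
 *     xsltSetSecurityPrefs(sec, XSLT_SECPREF_WRITE_FILE, xsltSecurityForbid);
 *     xsltSetSecurityPrefs(sec, XSLT_SECPREF_CREATE_DIRECTORY, xsltSecurityForbid);
 *     xsltSetCtxtSecurityPrefs(sec, tctxt);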
+ */ +typedef int (*xsltSecurityCheck) (xsltSecurityPrefsPtr sec, + xsltTransformContextPtr ctxt, + const char *value); + +/* + * Module interfaces + */ +XSLTPUBFUN xsltSecurityPrefsPtr XSLTCALL + xsltNewSecurityPrefs (void); +XSLTPUBFUN void XSLTCALL + xsltFreeSecurityPrefs (xsltSecurityPrefsPtr sec); +XSLTPUBFUN int XSLTCALL + xsltSetSecurityPrefs (xsltSecurityPrefsPtr sec, + xsltSecurityOption option, + xsltSecurityCheck func); +XSLTPUBFUN xsltSecurityCheck XSLTCALL + xsltGetSecurityPrefs (xsltSecurityPrefsPtr sec, + xsltSecurityOption option); + +XSLTPUBFUN void XSLTCALL + xsltSetDefaultSecurityPrefs (xsltSecurityPrefsPtr sec); +XSLTPUBFUN xsltSecurityPrefsPtr XSLTCALL + xsltGetDefaultSecurityPrefs (void); + +XSLTPUBFUN int XSLTCALL + xsltSetCtxtSecurityPrefs (xsltSecurityPrefsPtr sec, + xsltTransformContextPtr ctxt); + +XSLTPUBFUN int XSLTCALL + xsltSecurityAllow (xsltSecurityPrefsPtr sec, + xsltTransformContextPtr ctxt, + const char *value); +XSLTPUBFUN int XSLTCALL + xsltSecurityForbid (xsltSecurityPrefsPtr sec, + xsltTransformContextPtr ctxt, + const char *value); +/* + * internal interfaces + */ +XSLTPUBFUN int XSLTCALL + xsltCheckWrite (xsltSecurityPrefsPtr sec, + xsltTransformContextPtr ctxt, + const xmlChar *URL); +XSLTPUBFUN int XSLTCALL + xsltCheckRead (xsltSecurityPrefsPtr sec, + xsltTransformContextPtr ctxt, + const xmlChar *URL); + +#ifdef __cplusplus +} +#endif + +#endif /* __XML_XSLT_SECURITY_H__ */ + diff --git a/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/include/libxslt/templates.h b/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/include/libxslt/templates.h new file mode 100644 index 000000000..84a9de4d3 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/include/libxslt/templates.h @@ -0,0 +1,77 @@ +/* + * Summary: interface for the template processing + * Description: This set of routine encapsulates XPath calls + * and Attribute Value Templates evaluation. + * + * Copy: See Copyright for the status of this software. + * + * Author: Daniel Veillard + */ + +#ifndef __XML_XSLT_TEMPLATES_H__ +#define __XML_XSLT_TEMPLATES_H__ + +#include +#include +#include "xsltexports.h" +#include "xsltInternals.h" + +#ifdef __cplusplus +extern "C" { +#endif + +XSLTPUBFUN int XSLTCALL + xsltEvalXPathPredicate (xsltTransformContextPtr ctxt, + xmlXPathCompExprPtr comp, + xmlNsPtr *nsList, + int nsNr); +XSLTPUBFUN xmlChar * XSLTCALL + xsltEvalTemplateString (xsltTransformContextPtr ctxt, + xmlNodePtr contextNode, + xmlNodePtr inst); +XSLTPUBFUN xmlChar * XSLTCALL + xsltEvalAttrValueTemplate (xsltTransformContextPtr ctxt, + xmlNodePtr node, + const xmlChar *name, + const xmlChar *ns); +XSLTPUBFUN const xmlChar * XSLTCALL + xsltEvalStaticAttrValueTemplate (xsltStylesheetPtr style, + xmlNodePtr node, + const xmlChar *name, + const xmlChar *ns, + int *found); + +/* TODO: this is obviously broken ... the namespaces should be passed too ! 
*/ +XSLTPUBFUN xmlChar * XSLTCALL + xsltEvalXPathString (xsltTransformContextPtr ctxt, + xmlXPathCompExprPtr comp); +XSLTPUBFUN xmlChar * XSLTCALL + xsltEvalXPathStringNs (xsltTransformContextPtr ctxt, + xmlXPathCompExprPtr comp, + int nsNr, + xmlNsPtr *nsList); + +XSLTPUBFUN xmlNodePtr * XSLTCALL + xsltTemplateProcess (xsltTransformContextPtr ctxt, + xmlNodePtr node); +XSLTPUBFUN xmlAttrPtr XSLTCALL + xsltAttrListTemplateProcess (xsltTransformContextPtr ctxt, + xmlNodePtr target, + xmlAttrPtr cur); +XSLTPUBFUN xmlAttrPtr XSLTCALL + xsltAttrTemplateProcess (xsltTransformContextPtr ctxt, + xmlNodePtr target, + xmlAttrPtr attr); +XSLTPUBFUN xmlChar * XSLTCALL + xsltAttrTemplateValueProcess (xsltTransformContextPtr ctxt, + const xmlChar* attr); +XSLTPUBFUN xmlChar * XSLTCALL + xsltAttrTemplateValueProcessNode(xsltTransformContextPtr ctxt, + const xmlChar* str, + xmlNodePtr node); +#ifdef __cplusplus +} +#endif + +#endif /* __XML_XSLT_TEMPLATES_H__ */ + diff --git a/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/include/libxslt/transform.h b/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/include/libxslt/transform.h new file mode 100644 index 000000000..5a6f79591 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/include/libxslt/transform.h @@ -0,0 +1,207 @@ +/* + * Summary: the XSLT engine transformation part. + * Description: This module implements the bulk of the actual + * transformation processing. Most of the xsl: element + * constructs are implemented in this module. + * + * Copy: See Copyright for the status of this software. + * + * Author: Daniel Veillard + */ + +#ifndef __XML_XSLT_TRANSFORM_H__ +#define __XML_XSLT_TRANSFORM_H__ + +#include +#include +#include "xsltexports.h" +#include + +#ifdef __cplusplus +extern "C" { +#endif + +/** + * XInclude default processing. + */ +XSLTPUBFUN void XSLTCALL + xsltSetXIncludeDefault (int xinclude); +XSLTPUBFUN int XSLTCALL + xsltGetXIncludeDefault (void); + +/** + * Export context to users. + */ +XSLTPUBFUN xsltTransformContextPtr XSLTCALL + xsltNewTransformContext (xsltStylesheetPtr style, + xmlDocPtr doc); + +XSLTPUBFUN void XSLTCALL + xsltFreeTransformContext(xsltTransformContextPtr ctxt); + +XSLTPUBFUN xmlDocPtr XSLTCALL + xsltApplyStylesheetUser (xsltStylesheetPtr style, + xmlDocPtr doc, + const char **params, + const char *output, + FILE * profile, + xsltTransformContextPtr userCtxt); +XSLTPUBFUN void XSLTCALL + xsltProcessOneNode (xsltTransformContextPtr ctxt, + xmlNodePtr node, + xsltStackElemPtr params); +/** + * Private Interfaces. 
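 *
 * Editor's note (illustrative, not in the upstream header): the classic
 * one-shot entry point declared below is xsltApplyStylesheet(); params
 * is a NULL-terminated array of name/value pairs whose values are XPath
 * expressions (hence the inner quotes for strings):
 *
 *     const char *params[] = { "title", "'My title'", NULL };
 *     xmlDocPtr res = xsltApplyStylesheet(style, doc, params);
 *
 * where style and doc are a parsed stylesheet and source document.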
+ */ +XSLTPUBFUN void XSLTCALL + xsltApplyStripSpaces (xsltTransformContextPtr ctxt, + xmlNodePtr node); +XSLTPUBFUN xmlDocPtr XSLTCALL + xsltApplyStylesheet (xsltStylesheetPtr style, + xmlDocPtr doc, + const char **params); +XSLTPUBFUN xmlDocPtr XSLTCALL + xsltProfileStylesheet (xsltStylesheetPtr style, + xmlDocPtr doc, + const char **params, + FILE * output); +XSLTPUBFUN int XSLTCALL + xsltRunStylesheet (xsltStylesheetPtr style, + xmlDocPtr doc, + const char **params, + const char *output, + xmlSAXHandlerPtr SAX, + xmlOutputBufferPtr IObuf); +XSLTPUBFUN int XSLTCALL + xsltRunStylesheetUser (xsltStylesheetPtr style, + xmlDocPtr doc, + const char **params, + const char *output, + xmlSAXHandlerPtr SAX, + xmlOutputBufferPtr IObuf, + FILE * profile, + xsltTransformContextPtr userCtxt); +XSLTPUBFUN void XSLTCALL + xsltApplyOneTemplate (xsltTransformContextPtr ctxt, + xmlNodePtr node, + xmlNodePtr list, + xsltTemplatePtr templ, + xsltStackElemPtr params); +XSLTPUBFUN void XSLTCALL + xsltDocumentElem (xsltTransformContextPtr ctxt, + xmlNodePtr node, + xmlNodePtr inst, + xsltElemPreCompPtr comp); +XSLTPUBFUN void XSLTCALL + xsltSort (xsltTransformContextPtr ctxt, + xmlNodePtr node, + xmlNodePtr inst, + xsltElemPreCompPtr comp); +XSLTPUBFUN void XSLTCALL + xsltCopy (xsltTransformContextPtr ctxt, + xmlNodePtr node, + xmlNodePtr inst, + xsltElemPreCompPtr comp); +XSLTPUBFUN void XSLTCALL + xsltText (xsltTransformContextPtr ctxt, + xmlNodePtr node, + xmlNodePtr inst, + xsltElemPreCompPtr comp); +XSLTPUBFUN void XSLTCALL + xsltElement (xsltTransformContextPtr ctxt, + xmlNodePtr node, + xmlNodePtr inst, + xsltElemPreCompPtr comp); +XSLTPUBFUN void XSLTCALL + xsltComment (xsltTransformContextPtr ctxt, + xmlNodePtr node, + xmlNodePtr inst, + xsltElemPreCompPtr comp); +XSLTPUBFUN void XSLTCALL + xsltAttribute (xsltTransformContextPtr ctxt, + xmlNodePtr node, + xmlNodePtr inst, + xsltElemPreCompPtr comp); +XSLTPUBFUN void XSLTCALL + xsltProcessingInstruction(xsltTransformContextPtr ctxt, + xmlNodePtr node, + xmlNodePtr inst, + xsltElemPreCompPtr comp); +XSLTPUBFUN void XSLTCALL + xsltCopyOf (xsltTransformContextPtr ctxt, + xmlNodePtr node, + xmlNodePtr inst, + xsltElemPreCompPtr comp); +XSLTPUBFUN void XSLTCALL + xsltValueOf (xsltTransformContextPtr ctxt, + xmlNodePtr node, + xmlNodePtr inst, + xsltElemPreCompPtr comp); +XSLTPUBFUN void XSLTCALL + xsltNumber (xsltTransformContextPtr ctxt, + xmlNodePtr node, + xmlNodePtr inst, + xsltElemPreCompPtr comp); +XSLTPUBFUN void XSLTCALL + xsltApplyImports (xsltTransformContextPtr ctxt, + xmlNodePtr node, + xmlNodePtr inst, + xsltElemPreCompPtr comp); +XSLTPUBFUN void XSLTCALL + xsltCallTemplate (xsltTransformContextPtr ctxt, + xmlNodePtr node, + xmlNodePtr inst, + xsltElemPreCompPtr comp); +XSLTPUBFUN void XSLTCALL + xsltApplyTemplates (xsltTransformContextPtr ctxt, + xmlNodePtr node, + xmlNodePtr inst, + xsltElemPreCompPtr comp); +XSLTPUBFUN void XSLTCALL + xsltChoose (xsltTransformContextPtr ctxt, + xmlNodePtr node, + xmlNodePtr inst, + xsltElemPreCompPtr comp); +XSLTPUBFUN void XSLTCALL + xsltIf (xsltTransformContextPtr ctxt, + xmlNodePtr node, + xmlNodePtr inst, + xsltElemPreCompPtr comp); +XSLTPUBFUN void XSLTCALL + xsltForEach (xsltTransformContextPtr ctxt, + xmlNodePtr node, + xmlNodePtr inst, + xsltElemPreCompPtr comp); +XSLTPUBFUN void XSLTCALL + xsltRegisterAllElement (xsltTransformContextPtr ctxt); + +XSLTPUBFUN xmlNodePtr XSLTCALL + xsltCopyTextString (xsltTransformContextPtr ctxt, + xmlNodePtr target, + const xmlChar *string, + int noescape); + +/* 
Following 2 functions needed for libexslt/functions.c */ +XSLTPUBFUN void XSLTCALL + xsltLocalVariablePop (xsltTransformContextPtr ctxt, + int limitNr, + int level); +XSLTPUBFUN int XSLTCALL + xsltLocalVariablePush (xsltTransformContextPtr ctxt, + xsltStackElemPtr variable, + int level); +/* + * Hook for the debugger if activated. + */ +XSLTPUBFUN void XSLTCALL + xslHandleDebugger (xmlNodePtr cur, + xmlNodePtr node, + xsltTemplatePtr templ, + xsltTransformContextPtr ctxt); + +#ifdef __cplusplus +} +#endif + +#endif /* __XML_XSLT_TRANSFORM_H__ */ + diff --git a/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/include/libxslt/variables.h b/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/include/libxslt/variables.h new file mode 100644 index 000000000..e2adee0f7 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/include/libxslt/variables.h @@ -0,0 +1,118 @@ +/* + * Summary: interface for the variable matching and lookup. + * Description: interface for the variable matching and lookup. + * + * Copy: See Copyright for the status of this software. + * + * Author: Daniel Veillard + */ + +#ifndef __XML_XSLT_VARIABLES_H__ +#define __XML_XSLT_VARIABLES_H__ + +#include +#include +#include "xsltexports.h" +#include "xsltInternals.h" +#include "functions.h" + +#ifdef __cplusplus +extern "C" { +#endif + + +/** + * XSLT_REGISTER_VARIABLE_LOOKUP: + * + * Registering macro, not general purpose at all but used in different modules. + */ + +#define XSLT_REGISTER_VARIABLE_LOOKUP(ctxt) \ + xmlXPathRegisterVariableLookup((ctxt)->xpathCtxt, \ + xsltXPathVariableLookup, (void *)(ctxt)); \ + xsltRegisterAllFunctions((ctxt)->xpathCtxt); \ + xsltRegisterAllElement(ctxt); \ + (ctxt)->xpathCtxt->extra = ctxt + +/* + * Flags for memory management of RVTs + */ + +/** + * XSLT_RVT_LOCAL: + * + * RVT is destroyed after the current instructions ends. + */ +#define XSLT_RVT_LOCAL 1 + +/** + * XSLT_RVT_FUNC_RESULT: + * + * RVT is part of results returned with func:result. The RVT won't be + * destroyed after exiting a template and will be reset to XSLT_RVT_LOCAL or + * XSLT_RVT_VARIABLE in the template that receives the return value. + */ +#define XSLT_RVT_FUNC_RESULT 2 + +/** + * XSLT_RVT_GLOBAL: + * + * RVT is part of a global variable. + */ +#define XSLT_RVT_GLOBAL 3 + +/* + * Interfaces for the variable module. 
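 *
 * Editor's note (illustrative, not in the upstream header): of the
 * parameter entry points below, xsltEvalOneUserParam() treats the value
 * as an XPath expression while xsltQuoteOneUserParam() takes it as a
 * plain string, e.g. for a hypothetical transform context tctxt:
 *
 *     xsltEvalOneUserParam(tctxt, BAD_CAST "maxdepth", BAD_CAST "3");
 *     xsltQuoteOneUserParam(tctxt, BAD_CAST "title", BAD_CAST "My title");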
+ */ + +XSLTPUBFUN int XSLTCALL + xsltEvalGlobalVariables (xsltTransformContextPtr ctxt); +XSLTPUBFUN int XSLTCALL + xsltEvalUserParams (xsltTransformContextPtr ctxt, + const char **params); +XSLTPUBFUN int XSLTCALL + xsltQuoteUserParams (xsltTransformContextPtr ctxt, + const char **params); +XSLTPUBFUN int XSLTCALL + xsltEvalOneUserParam (xsltTransformContextPtr ctxt, + const xmlChar * name, + const xmlChar * value); +XSLTPUBFUN int XSLTCALL + xsltQuoteOneUserParam (xsltTransformContextPtr ctxt, + const xmlChar * name, + const xmlChar * value); + +XSLTPUBFUN void XSLTCALL + xsltParseGlobalVariable (xsltStylesheetPtr style, + xmlNodePtr cur); +XSLTPUBFUN void XSLTCALL + xsltParseGlobalParam (xsltStylesheetPtr style, + xmlNodePtr cur); +XSLTPUBFUN void XSLTCALL + xsltParseStylesheetVariable (xsltTransformContextPtr ctxt, + xmlNodePtr cur); +XSLTPUBFUN void XSLTCALL + xsltParseStylesheetParam (xsltTransformContextPtr ctxt, + xmlNodePtr cur); +XSLTPUBFUN xsltStackElemPtr XSLTCALL + xsltParseStylesheetCallerParam (xsltTransformContextPtr ctxt, + xmlNodePtr cur); +XSLTPUBFUN int XSLTCALL + xsltAddStackElemList (xsltTransformContextPtr ctxt, + xsltStackElemPtr elems); +XSLTPUBFUN void XSLTCALL + xsltFreeGlobalVariables (xsltTransformContextPtr ctxt); +XSLTPUBFUN xmlXPathObjectPtr XSLTCALL + xsltVariableLookup (xsltTransformContextPtr ctxt, + const xmlChar *name, + const xmlChar *ns_uri); +XSLTPUBFUN xmlXPathObjectPtr XSLTCALL + xsltXPathVariableLookup (void *ctxt, + const xmlChar *name, + const xmlChar *ns_uri); +#ifdef __cplusplus +} +#endif + +#endif /* __XML_XSLT_VARIABLES_H__ */ + diff --git a/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/include/libxslt/xslt.h b/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/include/libxslt/xslt.h new file mode 100644 index 000000000..02f491a58 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/include/libxslt/xslt.h @@ -0,0 +1,110 @@ +/* + * Summary: Interfaces, constants and types related to the XSLT engine + * Description: Interfaces, constants and types related to the XSLT engine + * + * Copy: See Copyright for the status of this software. + * + * Author: Daniel Veillard + */ + +#ifndef __XML_XSLT_H__ +#define __XML_XSLT_H__ + +#include +#include "xsltexports.h" + +#ifdef __cplusplus +extern "C" { +#endif + +/** + * XSLT_DEFAULT_VERSION: + * + * The default version of XSLT supported. + */ +#define XSLT_DEFAULT_VERSION "1.0" + +/** + * XSLT_DEFAULT_VENDOR: + * + * The XSLT "vendor" string for this processor. + */ +#define XSLT_DEFAULT_VENDOR "libxslt" + +/** + * XSLT_DEFAULT_URL: + * + * The XSLT "vendor" URL for this processor. + */ +#define XSLT_DEFAULT_URL "http://xmlsoft.org/XSLT/" + +/** + * XSLT_NAMESPACE: + * + * The XSLT specification namespace. + */ +#define XSLT_NAMESPACE ((const xmlChar *)"http://www.w3.org/1999/XSL/Transform") + +/** + * XSLT_PARSE_OPTIONS: + * + * The set of options to pass to an xmlReadxxx when loading files for + * XSLT consumption. + */ +#define XSLT_PARSE_OPTIONS \ + XML_PARSE_NOENT | XML_PARSE_DTDLOAD | XML_PARSE_DTDATTR | XML_PARSE_NOCDATA + +/** + * xsltMaxDepth: + * + * This value is used to detect templates loops. + */ +XSLTPUBVAR int xsltMaxDepth; + +/** + * * xsltMaxVars: + * * + * * This value is used to detect templates loops. + * */ +XSLTPUBVAR int xsltMaxVars; + +/** + * xsltEngineVersion: + * + * The version string for libxslt. 
+ */ +XSLTPUBVAR const char *xsltEngineVersion; + +/** + * xsltLibxsltVersion: + * + * The version of libxslt compiled. + */ +XSLTPUBVAR const int xsltLibxsltVersion; + +/** + * xsltLibxmlVersion: + * + * The version of libxml libxslt was compiled against. + */ +XSLTPUBVAR const int xsltLibxmlVersion; + +/* + * Global initialization function. + */ + +XSLTPUBFUN void XSLTCALL + xsltInit (void); + +/* + * Global cleanup function. + */ +XSLTPUBFUN void XSLTCALL + xsltCleanupGlobals (void); + +#ifdef __cplusplus +} +#endif + +#endif /* __XML_XSLT_H__ */ + diff --git a/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/include/libxslt/xsltInternals.h b/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/include/libxslt/xsltInternals.h new file mode 100644 index 000000000..6faa07db5 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/include/libxslt/xsltInternals.h @@ -0,0 +1,1995 @@ +/* + * Summary: internal data structures, constants and functions + * Description: Internal data structures, constants and functions used + * by the XSLT engine. + * They are not part of the API or ABI, i.e. they can change + * without prior notice, use carefully. + * + * Copy: See Copyright for the status of this software. + * + * Author: Daniel Veillard + */ + +#ifndef __XML_XSLT_INTERNALS_H__ +#define __XML_XSLT_INTERNALS_H__ + +#include +#include +#include +#include +#include +#include +#include +#include "xsltexports.h" +#include "numbersInternals.h" + +#ifdef __cplusplus +extern "C" { +#endif + +/* #define XSLT_DEBUG_PROFILE_CACHE */ + +/** + * XSLT_IS_TEXT_NODE: + * + * check if the argument is a text node + */ +#define XSLT_IS_TEXT_NODE(n) ((n != NULL) && \ + (((n)->type == XML_TEXT_NODE) || \ + ((n)->type == XML_CDATA_SECTION_NODE))) + + +/** + * XSLT_MARK_RES_TREE_FRAG: + * + * internal macro to set up tree fragments + */ +#define XSLT_MARK_RES_TREE_FRAG(n) \ + (n)->name = (char *) xmlStrdup(BAD_CAST " fake node libxslt"); + +/** + * XSLT_IS_RES_TREE_FRAG: + * + * internal macro to test tree fragments + */ +#define XSLT_IS_RES_TREE_FRAG(n) \ + ((n != NULL) && ((n)->type == XML_DOCUMENT_NODE) && \ + ((n)->name != NULL) && ((n)->name[0] == ' ')) + +/** + * XSLT_REFACTORED_KEYCOMP: + * + * Internal define to enable on-demand xsl:key computation. + * That's the only mode now but the define is kept for compatibility + */ +#define XSLT_REFACTORED_KEYCOMP + +/** + * XSLT_FAST_IF: + * + * Internal define to enable usage of xmlXPathCompiledEvalToBoolean() + * for XSLT "tests"; e.g. in + */ +#define XSLT_FAST_IF + +/** + * XSLT_REFACTORED: + * + * Internal define to enable the refactored parts of Libxslt. + */ +/* #define XSLT_REFACTORED */ +/* ==================================================================== */ + +/** + * XSLT_REFACTORED_VARS: + * + * Internal define to enable the refactored variable part of libxslt + */ +#define XSLT_REFACTORED_VARS + +#ifdef XSLT_REFACTORED + +extern const xmlChar *xsltXSLTAttrMarker; + + +/* TODO: REMOVE: #define XSLT_REFACTORED_EXCLRESNS */ + +/* TODO: REMOVE: #define XSLT_REFACTORED_NSALIAS */ + +/** + * XSLT_REFACTORED_XSLT_NSCOMP + * + * Internal define to enable the pointer-comparison of + * namespaces of XSLT elements. 
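/*
 * A minimal lifecycle sketch (not part of the vendored header) for the
 * globals declared in xslt.h above: xsltInit() is normally triggered
 * implicitly, but calling it up front is harmless, and the version
 * variables make it easy to log what the process is actually linked
 * against.
 */
#include <stdio.h>
#include <libxml/parser.h>
#include <libxslt/xslt.h>

static void
exampleReportVersionsAndCleanup(void)
{
    xsltInit();
    printf("libxslt %s (compiled as %d), built against libxml2 %d\n",
           xsltEngineVersion, xsltLibxsltVersion, xsltLibxmlVersion);

    /* ... parse stylesheets and run transformations here ... */

    xsltCleanupGlobals();     /* release libxslt global state */
    xmlCleanupParser();       /* release libxml2 global state */
}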
+ */ +/* #define XSLT_REFACTORED_XSLT_NSCOMP */ + +#ifdef XSLT_REFACTORED_XSLT_NSCOMP + +extern const xmlChar *xsltConstNamespaceNameXSLT; + +/** + * IS_XSLT_ELEM_FAST: + * + * quick test to detect XSLT elements + */ +#define IS_XSLT_ELEM_FAST(n) \ + (((n) != NULL) && ((n)->ns != NULL) && \ + ((n)->ns->href == xsltConstNamespaceNameXSLT)) + +/** + * IS_XSLT_ATTR_FAST: + * + * quick test to detect XSLT attributes + */ +#define IS_XSLT_ATTR_FAST(a) \ + (((a) != NULL) && ((a)->ns != NULL) && \ + ((a)->ns->href == xsltConstNamespaceNameXSLT)) + +/** + * XSLT_HAS_INTERNAL_NSMAP: + * + * check for namespace mapping + */ +#define XSLT_HAS_INTERNAL_NSMAP(s) \ + (((s) != NULL) && ((s)->principal) && \ + ((s)->principal->principalData) && \ + ((s)->principal->principalData->nsMap)) + +/** + * XSLT_GET_INTERNAL_NSMAP: + * + * get pointer to namespace map + */ +#define XSLT_GET_INTERNAL_NSMAP(s) ((s)->principal->principalData->nsMap) + +#else /* XSLT_REFACTORED_XSLT_NSCOMP */ + +/** + * IS_XSLT_ELEM_FAST: + * + * quick check whether this is an xslt element + */ +#define IS_XSLT_ELEM_FAST(n) \ + (((n) != NULL) && ((n)->ns != NULL) && \ + (xmlStrEqual((n)->ns->href, XSLT_NAMESPACE))) + +/** + * IS_XSLT_ATTR_FAST: + * + * quick check for xslt namespace attribute + */ +#define IS_XSLT_ATTR_FAST(a) \ + (((a) != NULL) && ((a)->ns != NULL) && \ + (xmlStrEqual((a)->ns->href, XSLT_NAMESPACE))) + + +#endif /* XSLT_REFACTORED_XSLT_NSCOMP */ + + +/** + * XSLT_REFACTORED_MANDATORY_VERSION: + * + * TODO: Currently disabled to surpress regression test failures, since + * the old behaviour was that a missing version attribute + * produced a only a warning and not an error, which was incerrect. + * So the regression tests need to be fixed if this is enabled. + */ +/* #define XSLT_REFACTORED_MANDATORY_VERSION */ + +/** + * xsltPointerList: + * + * Pointer-list for various purposes. + */ +typedef struct _xsltPointerList xsltPointerList; +typedef xsltPointerList *xsltPointerListPtr; +struct _xsltPointerList { + void **items; + int number; + int size; +}; + +#endif + +/** + * XSLT_REFACTORED_PARSING: + * + * Internal define to enable the refactored parts of Libxslt + * related to parsing. + */ +/* #define XSLT_REFACTORED_PARSING */ + +/** + * XSLT_MAX_SORT: + * + * Max number of specified xsl:sort on an element. + */ +#define XSLT_MAX_SORT 15 + +/** + * XSLT_PAT_NO_PRIORITY: + * + * Specific value for pattern without priority expressed. + */ +#define XSLT_PAT_NO_PRIORITY -12345789 + +/** + * xsltRuntimeExtra: + * + * Extra information added to the transformation context. 
+ */ +typedef struct _xsltRuntimeExtra xsltRuntimeExtra; +typedef xsltRuntimeExtra *xsltRuntimeExtraPtr; +struct _xsltRuntimeExtra { + void *info; /* pointer to the extra data */ + xmlFreeFunc deallocate; /* pointer to the deallocation routine */ + union { /* dual-purpose field */ + void *ptr; /* data not needing deallocation */ + int ival; /* integer value storage */ + } val; +}; + +/** + * XSLT_RUNTIME_EXTRA_LST: + * @ctxt: the transformation context + * @nr: the index + * + * Macro used to access extra information stored in the context + */ +#define XSLT_RUNTIME_EXTRA_LST(ctxt, nr) (ctxt)->extras[(nr)].info +/** + * XSLT_RUNTIME_EXTRA_FREE: + * @ctxt: the transformation context + * @nr: the index + * + * Macro used to free extra information stored in the context + */ +#define XSLT_RUNTIME_EXTRA_FREE(ctxt, nr) (ctxt)->extras[(nr)].deallocate +/** + * XSLT_RUNTIME_EXTRA: + * @ctxt: the transformation context + * @nr: the index + * + * Macro used to define extra information stored in the context + */ +#define XSLT_RUNTIME_EXTRA(ctxt, nr, typ) (ctxt)->extras[(nr)].val.typ + +/** + * xsltTemplate: + * + * The in-memory structure corresponding to an XSLT Template. + */ +typedef struct _xsltTemplate xsltTemplate; +typedef xsltTemplate *xsltTemplatePtr; +struct _xsltTemplate { + struct _xsltTemplate *next;/* chained list sorted by priority */ + struct _xsltStylesheet *style;/* the containing stylesheet */ + xmlChar *match; /* the matching string */ + float priority; /* as given from the stylesheet, not computed */ + const xmlChar *name; /* the local part of the name QName */ + const xmlChar *nameURI; /* the URI part of the name QName */ + const xmlChar *mode;/* the local part of the mode QName */ + const xmlChar *modeURI;/* the URI part of the mode QName */ + xmlNodePtr content; /* the template replacement value */ + xmlNodePtr elem; /* the source element */ + + /* + * TODO: @inheritedNsNr and @inheritedNs won't be used in the + * refactored code. + */ + int inheritedNsNr; /* number of inherited namespaces */ + xmlNsPtr *inheritedNs;/* inherited non-excluded namespaces */ + + /* Profiling information */ + int nbCalls; /* the number of time the template was called */ + unsigned long time; /* the time spent in this template */ + void *params; /* xsl:param instructions */ + + int templNr; /* Nb of templates in the stack */ + int templMax; /* Size of the templtes stack */ + xsltTemplatePtr *templCalledTab; /* templates called */ + int *templCountTab; /* .. and how often */ + + /* Conflict resolution */ + int position; +}; + +/** + * xsltDecimalFormat: + * + * Data structure of decimal-format. + */ +typedef struct _xsltDecimalFormat xsltDecimalFormat; +typedef xsltDecimalFormat *xsltDecimalFormatPtr; +struct _xsltDecimalFormat { + struct _xsltDecimalFormat *next; /* chained list */ + xmlChar *name; + /* Used for interpretation of pattern */ + xmlChar *digit; + xmlChar *patternSeparator; + /* May appear in result */ + xmlChar *minusSign; + xmlChar *infinity; + xmlChar *noNumber; /* Not-a-number */ + /* Used for interpretation of pattern and may appear in result */ + xmlChar *decimalPoint; + xmlChar *grouping; + xmlChar *percent; + xmlChar *permille; + xmlChar *zeroDigit; + const xmlChar *nsUri; +}; + +/** + * xsltDocument: + * + * Data structure associated to a parsed document. 
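/*
 * A minimal sketch (not part of the vendored header) of the "extras"
 * mechanism documented above: an extension reserves a slot on the
 * transformation context and then uses the XSLT_RUNTIME_EXTRA* macros to
 * stash per-run data together with its deallocator (the val.ptr / val.ival
 * union is reserved for data that needs no deallocation).  The payload and
 * helper name are hypothetical; xsltAllocateExtraCtxt() is declared later
 * in this header.
 */
#include <libxml/xmlmemory.h>
#include <libxslt/xsltInternals.h>

static int
exampleAttachRunData(xsltTransformContextPtr ctxt)
{
    int slot = xsltAllocateExtraCtxt(ctxt);          /* reserve an extras slot */
    void *payload = xmlMalloc(64);                   /* hypothetical per-run buffer */

    if (payload == NULL)
        return -1;
    XSLT_RUNTIME_EXTRA_LST(ctxt, slot) = payload;    /* data that needs deallocation */
    XSLT_RUNTIME_EXTRA_FREE(ctxt, slot) = xmlFree;   /* called when the context is freed */
    return slot;
}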
+ */ +typedef struct _xsltDocument xsltDocument; +typedef xsltDocument *xsltDocumentPtr; +struct _xsltDocument { + struct _xsltDocument *next; /* documents are kept in a chained list */ + int main; /* is this the main document */ + xmlDocPtr doc; /* the parsed document */ + void *keys; /* key tables storage */ + struct _xsltDocument *includes; /* subsidiary includes */ + int preproc; /* pre-processing already done */ + int nbKeysComputed; +}; + +/** + * xsltKeyDef: + * + * Representation of an xsl:key. + */ +typedef struct _xsltKeyDef xsltKeyDef; +typedef xsltKeyDef *xsltKeyDefPtr; +struct _xsltKeyDef { + struct _xsltKeyDef *next; + xmlNodePtr inst; + xmlChar *name; + xmlChar *nameURI; + xmlChar *match; + xmlChar *use; + xmlXPathCompExprPtr comp; + xmlXPathCompExprPtr usecomp; + xmlNsPtr *nsList; /* the namespaces in scope */ + int nsNr; /* the number of namespaces in scope */ +}; + +/** + * xsltKeyTable: + * + * Holds the computed keys for key definitions of the same QName. + * Is owned by an xsltDocument. + */ +typedef struct _xsltKeyTable xsltKeyTable; +typedef xsltKeyTable *xsltKeyTablePtr; +struct _xsltKeyTable { + struct _xsltKeyTable *next; + xmlChar *name; + xmlChar *nameURI; + xmlHashTablePtr keys; +}; + +/* + * The in-memory structure corresponding to an XSLT Stylesheet. + * NOTE: most of the content is simply linked from the doc tree + * structure, no specific allocation is made. + */ +typedef struct _xsltStylesheet xsltStylesheet; +typedef xsltStylesheet *xsltStylesheetPtr; + +typedef struct _xsltTransformContext xsltTransformContext; +typedef xsltTransformContext *xsltTransformContextPtr; + +/** + * xsltElemPreComp: + * + * The in-memory structure corresponding to element precomputed data, + * designed to be extended by extension implementors. + */ +typedef struct _xsltElemPreComp xsltElemPreComp; +typedef xsltElemPreComp *xsltElemPreCompPtr; + +/** + * xsltTransformFunction: + * @ctxt: the XSLT transformation context + * @node: the input node + * @inst: the stylesheet node + * @comp: the compiled information from the stylesheet + * + * Signature of the function associated to elements part of the + * stylesheet language like xsl:if or xsl:apply-templates. + */ +typedef void (*xsltTransformFunction) (xsltTransformContextPtr ctxt, + xmlNodePtr node, + xmlNodePtr inst, + xsltElemPreCompPtr comp); + +/** + * xsltSortFunc: + * @ctxt: a transformation context + * @sorts: the node-set to sort + * @nbsorts: the number of sorts + * + * Signature of the function to use during sorting + */ +typedef void (*xsltSortFunc) (xsltTransformContextPtr ctxt, xmlNodePtr *sorts, + int nbsorts); + +typedef enum { + XSLT_FUNC_COPY=1, + XSLT_FUNC_SORT, + XSLT_FUNC_TEXT, + XSLT_FUNC_ELEMENT, + XSLT_FUNC_ATTRIBUTE, + XSLT_FUNC_COMMENT, + XSLT_FUNC_PI, + XSLT_FUNC_COPYOF, + XSLT_FUNC_VALUEOF, + XSLT_FUNC_NUMBER, + XSLT_FUNC_APPLYIMPORTS, + XSLT_FUNC_CALLTEMPLATE, + XSLT_FUNC_APPLYTEMPLATES, + XSLT_FUNC_CHOOSE, + XSLT_FUNC_IF, + XSLT_FUNC_FOREACH, + XSLT_FUNC_DOCUMENT, + XSLT_FUNC_WITHPARAM, + XSLT_FUNC_PARAM, + XSLT_FUNC_VARIABLE, + XSLT_FUNC_WHEN, + XSLT_FUNC_EXTENSION +#ifdef XSLT_REFACTORED + , + XSLT_FUNC_OTHERWISE, + XSLT_FUNC_FALLBACK, + XSLT_FUNC_MESSAGE, + XSLT_FUNC_INCLUDE, + XSLT_FUNC_ATTRSET, + XSLT_FUNC_LITERAL_RESULT_ELEMENT, + XSLT_FUNC_UNKOWN_FORWARDS_COMPAT +#endif +} xsltStyleType; + +/** + * xsltElemPreCompDeallocator: + * @comp: the #xsltElemPreComp to free up + * + * Deallocates an #xsltElemPreComp structure. 
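/*
 * A minimal sketch (not part of the vendored header) of the xsltSortFunc
 * hook whose signature is defined above: a wrapper that logs and then
 * defers to the library's default sorter.  xsltSetCtxtSortFunc(),
 * xsltDefaultSortFunction() and the generic debug channel are declared in
 * xsltutils.h; the wrapper name is hypothetical.
 */
#include <libxslt/xsltInternals.h>
#include <libxslt/xsltutils.h>

static void
exampleSortHook(xsltTransformContextPtr ctxt, xmlNodePtr *sorts, int nbsorts)
{
    xsltGenericDebug(xsltGenericDebugContext,
                     "sorting with %d xsl:sort key(s)\n", nbsorts);
    xsltDefaultSortFunction(ctxt, sorts, nbsorts);   /* keep the stock behaviour */
}

/* After the transformation context has been created:      */
/*     xsltSetCtxtSortFunc(ctxt, exampleSortHook);          */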
+ */ +typedef void (*xsltElemPreCompDeallocator) (xsltElemPreCompPtr comp); + +/** + * xsltElemPreComp: + * + * The basic structure for compiled items of the AST of the XSLT processor. + * This structure is also intended to be extended by extension implementors. + * TODO: This is somehow not nice, since it has a "free" field, which + * derived stylesheet-structs do not have. + */ +struct _xsltElemPreComp { + xsltElemPreCompPtr next; /* next item in the global chained + list held by xsltStylesheet. */ + xsltStyleType type; /* type of the element */ + xsltTransformFunction func; /* handling function */ + xmlNodePtr inst; /* the node in the stylesheet's tree + corresponding to this item */ + + /* end of common part */ + xsltElemPreCompDeallocator free; /* the deallocator */ +}; + +/** + * xsltStylePreComp: + * + * The abstract basic structure for items of the XSLT processor. + * This includes: + * 1) compiled forms of XSLT instructions (xsl:if, xsl:attribute, etc.) + * 2) compiled forms of literal result elements + * 3) compiled forms of extension elements + */ +typedef struct _xsltStylePreComp xsltStylePreComp; +typedef xsltStylePreComp *xsltStylePreCompPtr; + +#ifdef XSLT_REFACTORED + +/* +* Some pointer-list utility functions. +*/ +XSLTPUBFUN xsltPointerListPtr XSLTCALL + xsltPointerListCreate (int initialSize); +XSLTPUBFUN void XSLTCALL + xsltPointerListFree (xsltPointerListPtr list); +XSLTPUBFUN void XSLTCALL + xsltPointerListClear (xsltPointerListPtr list); +XSLTPUBFUN int XSLTCALL + xsltPointerListAddSize (xsltPointerListPtr list, + void *item, + int initialSize); + +/************************************************************************ + * * + * Refactored structures * + * * + ************************************************************************/ + +typedef struct _xsltNsListContainer xsltNsListContainer; +typedef xsltNsListContainer *xsltNsListContainerPtr; +struct _xsltNsListContainer { + xmlNsPtr *list; + int totalNumber; + int xpathNumber; +}; + +/** + * XSLT_ITEM_COMPATIBILITY_FIELDS: + * + * Fields for API compatibility to the structure + * _xsltElemPreComp which is used for extension functions. + * Note that @next is used for storage; it does not reflect a next + * sibling in the tree. + * TODO: Evaluate if we really need such a compatibility. + */ +#define XSLT_ITEM_COMPATIBILITY_FIELDS \ + xsltElemPreCompPtr next;\ + xsltStyleType type;\ + xsltTransformFunction func;\ + xmlNodePtr inst; + +/** + * XSLT_ITEM_NAVIGATION_FIELDS: + * + * Currently empty. + * TODO: It is intended to hold navigational fields in the future. + */ +#define XSLT_ITEM_NAVIGATION_FIELDS +/* + xsltStylePreCompPtr parent;\ + xsltStylePreCompPtr children;\ + xsltStylePreCompPtr nextItem; +*/ + +/** + * XSLT_ITEM_NSINSCOPE_FIELDS: + * + * The in-scope namespaces. + */ +#define XSLT_ITEM_NSINSCOPE_FIELDS xsltNsListContainerPtr inScopeNs; + +/** + * XSLT_ITEM_COMMON_FIELDS: + * + * Common fields used for all items. + */ +#define XSLT_ITEM_COMMON_FIELDS \ + XSLT_ITEM_COMPATIBILITY_FIELDS \ + XSLT_ITEM_NAVIGATION_FIELDS \ + XSLT_ITEM_NSINSCOPE_FIELDS + +/** + * _xsltStylePreComp: + * + * The abstract basic structure for items of the XSLT processor. + * This includes: + * 1) compiled forms of XSLT instructions (e.g. xsl:if, xsl:attribute, etc.) + * 2) compiled forms of literal result elements + * 3) various properties for XSLT instructions (e.g. 
xsl:when, + * xsl:with-param) + * + * REVISIT TODO: Keep this structure equal to the fields + * defined by XSLT_ITEM_COMMON_FIELDS + */ +struct _xsltStylePreComp { + xsltElemPreCompPtr next; /* next item in the global chained + list held by xsltStylesheet */ + xsltStyleType type; /* type of the item */ + xsltTransformFunction func; /* handling function */ + xmlNodePtr inst; /* the node in the stylesheet's tree + corresponding to this item. */ + /* Currently no navigational fields. */ + xsltNsListContainerPtr inScopeNs; +}; + +/** + * xsltStyleBasicEmptyItem: + * + * Abstract structure only used as a short-cut for + * XSLT items with no extra fields. + * NOTE that it is intended that this structure looks the same as + * _xsltStylePreComp. + */ +typedef struct _xsltStyleBasicEmptyItem xsltStyleBasicEmptyItem; +typedef xsltStyleBasicEmptyItem *xsltStyleBasicEmptyItemPtr; + +struct _xsltStyleBasicEmptyItem { + XSLT_ITEM_COMMON_FIELDS +}; + +/** + * xsltStyleBasicExpressionItem: + * + * Abstract structure only used as a short-cut for + * XSLT items with just an expression. + */ +typedef struct _xsltStyleBasicExpressionItem xsltStyleBasicExpressionItem; +typedef xsltStyleBasicExpressionItem *xsltStyleBasicExpressionItemPtr; + +struct _xsltStyleBasicExpressionItem { + XSLT_ITEM_COMMON_FIELDS + + const xmlChar *select; /* TODO: Change this to "expression". */ + xmlXPathCompExprPtr comp; /* TODO: Change this to compExpr. */ +}; + +/************************************************************************ + * * + * XSLT-instructions/declarations * + * * + ************************************************************************/ + +/** + * xsltStyleItemElement: + * + * + * + * + * + */ +typedef struct _xsltStyleItemElement xsltStyleItemElement; +typedef xsltStyleItemElement *xsltStyleItemElementPtr; + +struct _xsltStyleItemElement { + XSLT_ITEM_COMMON_FIELDS + + const xmlChar *use; + int has_use; + const xmlChar *name; + int has_name; + const xmlChar *ns; + const xmlChar *nsPrefix; + int has_ns; +}; + +/** + * xsltStyleItemAttribute: + * + * + * + * + * + */ +typedef struct _xsltStyleItemAttribute xsltStyleItemAttribute; +typedef xsltStyleItemAttribute *xsltStyleItemAttributePtr; + +struct _xsltStyleItemAttribute { + XSLT_ITEM_COMMON_FIELDS + const xmlChar *name; + int has_name; + const xmlChar *ns; + const xmlChar *nsPrefix; + int has_ns; +}; + +/** + * xsltStyleItemText: + * + * + * + * + * + */ +typedef struct _xsltStyleItemText xsltStyleItemText; +typedef xsltStyleItemText *xsltStyleItemTextPtr; + +struct _xsltStyleItemText { + XSLT_ITEM_COMMON_FIELDS + int noescape; /* text */ +}; + +/** + * xsltStyleItemComment: + * + * + * + * + * + */ +typedef xsltStyleBasicEmptyItem xsltStyleItemComment; +typedef xsltStyleItemComment *xsltStyleItemCommentPtr; + +/** + * xsltStyleItemPI: + * + * + * + * + * + */ +typedef struct _xsltStyleItemPI xsltStyleItemPI; +typedef xsltStyleItemPI *xsltStyleItemPIPtr; + +struct _xsltStyleItemPI { + XSLT_ITEM_COMMON_FIELDS + const xmlChar *name; + int has_name; +}; + +/** + * xsltStyleItemApplyImports: + * + * + * + */ +typedef xsltStyleBasicEmptyItem xsltStyleItemApplyImports; +typedef xsltStyleItemApplyImports *xsltStyleItemApplyImportsPtr; + +/** + * xsltStyleItemApplyTemplates: + * + * + * + * + * + */ +typedef struct _xsltStyleItemApplyTemplates xsltStyleItemApplyTemplates; +typedef xsltStyleItemApplyTemplates *xsltStyleItemApplyTemplatesPtr; + +struct _xsltStyleItemApplyTemplates { + XSLT_ITEM_COMMON_FIELDS + + const xmlChar *mode; /* apply-templates */ + const 
xmlChar *modeURI; /* apply-templates */ + const xmlChar *select; /* sort, copy-of, value-of, apply-templates */ + xmlXPathCompExprPtr comp; /* a precompiled XPath expression */ + /* TODO: with-params */ +}; + +/** + * xsltStyleItemCallTemplate: + * + * + * + * + * + */ +typedef struct _xsltStyleItemCallTemplate xsltStyleItemCallTemplate; +typedef xsltStyleItemCallTemplate *xsltStyleItemCallTemplatePtr; + +struct _xsltStyleItemCallTemplate { + XSLT_ITEM_COMMON_FIELDS + + xsltTemplatePtr templ; /* call-template */ + const xmlChar *name; /* element, attribute, pi */ + int has_name; /* element, attribute, pi */ + const xmlChar *ns; /* element */ + int has_ns; /* element */ + /* TODO: with-params */ +}; + +/** + * xsltStyleItemCopy: + * + * + * + * + * + */ +typedef struct _xsltStyleItemCopy xsltStyleItemCopy; +typedef xsltStyleItemCopy *xsltStyleItemCopyPtr; + +struct _xsltStyleItemCopy { + XSLT_ITEM_COMMON_FIELDS + const xmlChar *use; /* copy, element */ + int has_use; /* copy, element */ +}; + +/** + * xsltStyleItemIf: + * + * + * + * + * + */ +typedef struct _xsltStyleItemIf xsltStyleItemIf; +typedef xsltStyleItemIf *xsltStyleItemIfPtr; + +struct _xsltStyleItemIf { + XSLT_ITEM_COMMON_FIELDS + + const xmlChar *test; /* if */ + xmlXPathCompExprPtr comp; /* a precompiled XPath expression */ +}; + + +/** + * xsltStyleItemCopyOf: + * + * + * + */ +typedef xsltStyleBasicExpressionItem xsltStyleItemCopyOf; +typedef xsltStyleItemCopyOf *xsltStyleItemCopyOfPtr; + +/** + * xsltStyleItemValueOf: + * + * + * + */ +typedef struct _xsltStyleItemValueOf xsltStyleItemValueOf; +typedef xsltStyleItemValueOf *xsltStyleItemValueOfPtr; + +struct _xsltStyleItemValueOf { + XSLT_ITEM_COMMON_FIELDS + + const xmlChar *select; + xmlXPathCompExprPtr comp; /* a precompiled XPath expression */ + int noescape; +}; + +/** + * xsltStyleItemNumber: + * + * + * + */ +typedef struct _xsltStyleItemNumber xsltStyleItemNumber; +typedef xsltStyleItemNumber *xsltStyleItemNumberPtr; + +struct _xsltStyleItemNumber { + XSLT_ITEM_COMMON_FIELDS + xsltNumberData numdata; /* number */ +}; + +/** + * xsltStyleItemChoose: + * + * + * + * + * + */ +typedef xsltStyleBasicEmptyItem xsltStyleItemChoose; +typedef xsltStyleItemChoose *xsltStyleItemChoosePtr; + +/** + * xsltStyleItemFallback: + * + * + * + * + * + */ +typedef xsltStyleBasicEmptyItem xsltStyleItemFallback; +typedef xsltStyleItemFallback *xsltStyleItemFallbackPtr; + +/** + * xsltStyleItemForEach: + * + * + * + * + * + */ +typedef xsltStyleBasicExpressionItem xsltStyleItemForEach; +typedef xsltStyleItemForEach *xsltStyleItemForEachPtr; + +/** + * xsltStyleItemMessage: + * + * + * + * + * + */ +typedef struct _xsltStyleItemMessage xsltStyleItemMessage; +typedef xsltStyleItemMessage *xsltStyleItemMessagePtr; + +struct _xsltStyleItemMessage { + XSLT_ITEM_COMMON_FIELDS + int terminate; +}; + +/** + * xsltStyleItemDocument: + * + * NOTE: This is not an instruction of XSLT 1.0. + */ +typedef struct _xsltStyleItemDocument xsltStyleItemDocument; +typedef xsltStyleItemDocument *xsltStyleItemDocumentPtr; + +struct _xsltStyleItemDocument { + XSLT_ITEM_COMMON_FIELDS + int ver11; /* assigned: in xsltDocumentComp; + read: nowhere; + TODO: Check if we need. 
*/ + const xmlChar *filename; /* document URL */ + int has_filename; +}; + +/************************************************************************ + * * + * Non-instructions (actually properties of instructions/declarations) * + * * + ************************************************************************/ + +/** + * xsltStyleBasicItemVariable: + * + * Basic struct for xsl:variable, xsl:param and xsl:with-param. + * It's currently important to have equal fields, since + * xsltParseStylesheetCallerParam() is used with xsl:with-param from + * the xslt side and with xsl:param from the exslt side (in + * exsltFuncFunctionFunction()). + * + * FUTURE NOTE: In XSLT 2.0 xsl:param, xsl:variable and xsl:with-param + * have additional different fields. + */ +typedef struct _xsltStyleBasicItemVariable xsltStyleBasicItemVariable; +typedef xsltStyleBasicItemVariable *xsltStyleBasicItemVariablePtr; + +struct _xsltStyleBasicItemVariable { + XSLT_ITEM_COMMON_FIELDS + + const xmlChar *select; + xmlXPathCompExprPtr comp; + + const xmlChar *name; + int has_name; + const xmlChar *ns; + int has_ns; +}; + +/** + * xsltStyleItemVariable: + * + * + * + * + * + */ +typedef xsltStyleBasicItemVariable xsltStyleItemVariable; +typedef xsltStyleItemVariable *xsltStyleItemVariablePtr; + +/** + * xsltStyleItemParam: + * + * + * + * + * + */ +typedef struct _xsltStyleItemParam xsltStyleItemParam; +typedef xsltStyleItemParam *xsltStyleItemParamPtr; + +struct _xsltStyleItemParam { + XSLT_ITEM_COMMON_FIELDS + + const xmlChar *select; + xmlXPathCompExprPtr comp; + + const xmlChar *name; + int has_name; + const xmlChar *ns; + int has_ns; +}; + +/** + * xsltStyleItemWithParam: + * + * + * + * + */ +typedef xsltStyleBasicItemVariable xsltStyleItemWithParam; +typedef xsltStyleItemWithParam *xsltStyleItemWithParamPtr; + +/** + * xsltStyleItemSort: + * + * Reflects the XSLT xsl:sort item. 
+ * Allowed parents: xsl:apply-templates, xsl:for-each + * + */ +typedef struct _xsltStyleItemSort xsltStyleItemSort; +typedef xsltStyleItemSort *xsltStyleItemSortPtr; + +struct _xsltStyleItemSort { + XSLT_ITEM_COMMON_FIELDS + + const xmlChar *stype; /* sort */ + int has_stype; /* sort */ + int number; /* sort */ + const xmlChar *order; /* sort */ + int has_order; /* sort */ + int descending; /* sort */ + const xmlChar *lang; /* sort */ + int has_lang; /* sort */ + const xmlChar *case_order; /* sort */ + int lower_first; /* sort */ + + const xmlChar *use; + int has_use; + + const xmlChar *select; /* sort, copy-of, value-of, apply-templates */ + + xmlXPathCompExprPtr comp; /* a precompiled XPath expression */ +}; + + +/** + * xsltStyleItemWhen: + * + * + * + * + * Allowed parent: xsl:choose + */ +typedef struct _xsltStyleItemWhen xsltStyleItemWhen; +typedef xsltStyleItemWhen *xsltStyleItemWhenPtr; + +struct _xsltStyleItemWhen { + XSLT_ITEM_COMMON_FIELDS + + const xmlChar *test; + xmlXPathCompExprPtr comp; +}; + +/** + * xsltStyleItemOtherwise: + * + * Allowed parent: xsl:choose + * + * + * + */ +typedef struct _xsltStyleItemOtherwise xsltStyleItemOtherwise; +typedef xsltStyleItemOtherwise *xsltStyleItemOtherwisePtr; + +struct _xsltStyleItemOtherwise { + XSLT_ITEM_COMMON_FIELDS +}; + +typedef struct _xsltStyleItemInclude xsltStyleItemInclude; +typedef xsltStyleItemInclude *xsltStyleItemIncludePtr; + +struct _xsltStyleItemInclude { + XSLT_ITEM_COMMON_FIELDS + xsltDocumentPtr include; +}; + +/************************************************************************ + * * + * XSLT elements in forwards-compatible mode * + * * + ************************************************************************/ + +typedef struct _xsltStyleItemUknown xsltStyleItemUknown; +typedef xsltStyleItemUknown *xsltStyleItemUknownPtr; +struct _xsltStyleItemUknown { + XSLT_ITEM_COMMON_FIELDS +}; + + +/************************************************************************ + * * + * Extension elements * + * * + ************************************************************************/ + +/* + * xsltStyleItemExtElement: + * + * Reflects extension elements. + * + * NOTE: Due to the fact that the structure xsltElemPreComp is most + * probably already heavily in use out there by users, so we cannot + * easily change it, we'll create an intermediate structure which will + * hold an xsltElemPreCompPtr. + * BIG NOTE: The only problem I see here is that the user processes the + * content of the stylesheet tree, possibly he'll lookup the node->psvi + * fields in order to find subsequent extension functions. + * In this case, the user's code will break, since the node->psvi + * field will hold now the xsltStyleItemExtElementPtr and not + * the xsltElemPreCompPtr. + * However the place where the structure is anchored in the node-tree, + * namely node->psvi, has beed already once been moved from node->_private + * to node->psvi, so we have a precedent here, which, I think, should allow + * us to change such semantics without headaches. 
+ */ +typedef struct _xsltStyleItemExtElement xsltStyleItemExtElement; +typedef xsltStyleItemExtElement *xsltStyleItemExtElementPtr; +struct _xsltStyleItemExtElement { + XSLT_ITEM_COMMON_FIELDS + xsltElemPreCompPtr item; +}; + +/************************************************************************ + * * + * Literal result elements * + * * + ************************************************************************/ + +typedef struct _xsltEffectiveNs xsltEffectiveNs; +typedef xsltEffectiveNs *xsltEffectiveNsPtr; +struct _xsltEffectiveNs { + xsltEffectiveNsPtr nextInStore; /* storage next */ + xsltEffectiveNsPtr next; /* next item in the list */ + const xmlChar *prefix; + const xmlChar *nsName; + /* + * Indicates if eclared on the literal result element; dunno if really + * needed. + */ + int holdByElem; +}; + +/* + * Info for literal result elements. + * This will be set on the elem->psvi field and will be + * shared by literal result elements, which have the same + * excluded result namespaces; i.e., this *won't* be created uniquely + * for every literal result element. + */ +typedef struct _xsltStyleItemLRElementInfo xsltStyleItemLRElementInfo; +typedef xsltStyleItemLRElementInfo *xsltStyleItemLRElementInfoPtr; +struct _xsltStyleItemLRElementInfo { + XSLT_ITEM_COMMON_FIELDS + /* + * @effectiveNs is the set of effective ns-nodes + * on the literal result element, which will be added to the result + * element if not already existing in the result tree. + * This means that excluded namespaces (via exclude-result-prefixes, + * extension-element-prefixes and the XSLT namespace) not added + * to the set. + * Namespace-aliasing was applied on the @effectiveNs. + */ + xsltEffectiveNsPtr effectiveNs; + +}; + +#ifdef XSLT_REFACTORED + +typedef struct _xsltNsAlias xsltNsAlias; +typedef xsltNsAlias *xsltNsAliasPtr; +struct _xsltNsAlias { + xsltNsAliasPtr next; /* next in the list */ + xmlNsPtr literalNs; + xmlNsPtr targetNs; + xmlDocPtr docOfTargetNs; +}; +#endif + +#ifdef XSLT_REFACTORED_XSLT_NSCOMP + +typedef struct _xsltNsMap xsltNsMap; +typedef xsltNsMap *xsltNsMapPtr; +struct _xsltNsMap { + xsltNsMapPtr next; /* next in the list */ + xmlDocPtr doc; + xmlNodePtr elem; /* the element holding the ns-decl */ + xmlNsPtr ns; /* the xmlNs structure holding the XML namespace name */ + const xmlChar *origNsName; /* the original XML namespace name */ + const xmlChar *newNsName; /* the mapped XML namespace name */ +}; +#endif + +/************************************************************************ + * * + * Compile-time structures for *internal* use only * + * * + ************************************************************************/ + +typedef struct _xsltPrincipalStylesheetData xsltPrincipalStylesheetData; +typedef xsltPrincipalStylesheetData *xsltPrincipalStylesheetDataPtr; + +typedef struct _xsltNsList xsltNsList; +typedef xsltNsList *xsltNsListPtr; +struct _xsltNsList { + xsltNsListPtr next; /* next in the list */ + xmlNsPtr ns; +}; + +/* +* xsltVarInfo: +* +* Used at compilation time for parameters and variables. +*/ +typedef struct _xsltVarInfo xsltVarInfo; +typedef xsltVarInfo *xsltVarInfoPtr; +struct _xsltVarInfo { + xsltVarInfoPtr next; /* next in the list */ + xsltVarInfoPtr prev; + int depth; /* the depth in the tree */ + const xmlChar *name; + const xmlChar *nsName; +}; + +/** + * xsltCompilerNodeInfo: + * + * Per-node information during compile-time. 
+ */ +typedef struct _xsltCompilerNodeInfo xsltCompilerNodeInfo; +typedef xsltCompilerNodeInfo *xsltCompilerNodeInfoPtr; +struct _xsltCompilerNodeInfo { + xsltCompilerNodeInfoPtr next; + xsltCompilerNodeInfoPtr prev; + xmlNodePtr node; + int depth; + xsltTemplatePtr templ; /* The owning template */ + int category; /* XSLT element, LR-element or + extension element */ + xsltStyleType type; + xsltElemPreCompPtr item; /* The compiled information */ + /* The current in-scope namespaces */ + xsltNsListContainerPtr inScopeNs; + /* The current excluded result namespaces */ + xsltPointerListPtr exclResultNs; + /* The current extension instruction namespaces */ + xsltPointerListPtr extElemNs; + + /* The current info for literal result elements. */ + xsltStyleItemLRElementInfoPtr litResElemInfo; + /* + * Set to 1 if in-scope namespaces changed, + * or excluded result namespaces changed, + * or extension element namespaces changed. + * This will trigger creation of new infos + * for literal result elements. + */ + int nsChanged; + int preserveWhitespace; + int stripWhitespace; + int isRoot; /* whether this is the stylesheet's root node */ + int forwardsCompat; /* whether forwards-compatible mode is enabled */ + /* whether the content of an extension element was processed */ + int extContentHandled; + /* the type of the current child */ + xsltStyleType curChildType; +}; + +/** + * XSLT_CCTXT: + * + * get pointer to compiler context + */ +#define XSLT_CCTXT(style) ((xsltCompilerCtxtPtr) style->compCtxt) + +typedef enum { + XSLT_ERROR_SEVERITY_ERROR = 0, + XSLT_ERROR_SEVERITY_WARNING +} xsltErrorSeverityType; + +typedef struct _xsltCompilerCtxt xsltCompilerCtxt; +typedef xsltCompilerCtxt *xsltCompilerCtxtPtr; +struct _xsltCompilerCtxt { + void *errorCtxt; /* user specific error context */ + /* + * used for error/warning reports; e.g. XSLT_ERROR_SEVERITY_WARNING */ + xsltErrorSeverityType errSeverity; + int warnings; /* TODO: number of warnings found at + compilation */ + int errors; /* TODO: number of errors found at + compilation */ + xmlDictPtr dict; + xsltStylesheetPtr style; + int simplified; /* whether this is a simplified stylesheet */ + /* TODO: structured/unstructured error contexts. */ + int depth; /* Current depth of processing */ + + xsltCompilerNodeInfoPtr inode; + xsltCompilerNodeInfoPtr inodeList; + xsltCompilerNodeInfoPtr inodeLast; + xsltPointerListPtr tmpList; /* Used for various purposes */ + /* + * The XSLT version as specified by the stylesheet's root element. + */ + int isInclude; + int hasForwardsCompat; /* whether forwards-compatible mode was used + in a parsing episode */ + int maxNodeInfos; /* TEMP TODO: just for the interest */ + int maxLREs; /* TEMP TODO: just for the interest */ + /* + * In order to keep the old behaviour, applying strict rules of + * the spec can be turned off. This has effect only on special + * mechanisms like whitespace-stripping in the stylesheet. + */ + int strict; + xsltPrincipalStylesheetDataPtr psData; + xsltStyleItemUknownPtr unknownItem; + int hasNsAliases; /* Indicator if there was an xsl:namespace-alias. */ + xsltNsAliasPtr nsAliases; + xsltVarInfoPtr ivars; /* Storage of local in-scope variables/params. */ + xsltVarInfoPtr ivar; /* topmost local variable/param. */ +}; + +#else /* XSLT_REFACTORED */ +/* +* The old structures before refactoring. +*/ + +/** + * _xsltStylePreComp: + * + * The in-memory structure corresponding to XSLT stylesheet constructs + * precomputed data. 
+ */ +struct _xsltStylePreComp { + xsltElemPreCompPtr next; /* chained list */ + xsltStyleType type; /* type of the element */ + xsltTransformFunction func; /* handling function */ + xmlNodePtr inst; /* the instruction */ + + /* + * Pre computed values. + */ + + const xmlChar *stype; /* sort */ + int has_stype; /* sort */ + int number; /* sort */ + const xmlChar *order; /* sort */ + int has_order; /* sort */ + int descending; /* sort */ + const xmlChar *lang; /* sort */ + int has_lang; /* sort */ + const xmlChar *case_order; /* sort */ + int lower_first; /* sort */ + + const xmlChar *use; /* copy, element */ + int has_use; /* copy, element */ + + int noescape; /* text */ + + const xmlChar *name; /* element, attribute, pi */ + int has_name; /* element, attribute, pi */ + const xmlChar *ns; /* element */ + int has_ns; /* element */ + + const xmlChar *mode; /* apply-templates */ + const xmlChar *modeURI; /* apply-templates */ + + const xmlChar *test; /* if */ + + xsltTemplatePtr templ; /* call-template */ + + const xmlChar *select; /* sort, copy-of, value-of, apply-templates */ + + int ver11; /* document */ + const xmlChar *filename; /* document URL */ + int has_filename; /* document */ + + xsltNumberData numdata; /* number */ + + xmlXPathCompExprPtr comp; /* a precompiled XPath expression */ + xmlNsPtr *nsList; /* the namespaces in scope */ + int nsNr; /* the number of namespaces in scope */ +}; + +#endif /* XSLT_REFACTORED */ + + +/* + * The in-memory structure corresponding to an XSLT Variable + * or Param. + */ +typedef struct _xsltStackElem xsltStackElem; +typedef xsltStackElem *xsltStackElemPtr; +struct _xsltStackElem { + struct _xsltStackElem *next;/* chained list */ + xsltStylePreCompPtr comp; /* the compiled form */ + int computed; /* was the evaluation done */ + const xmlChar *name; /* the local part of the name QName */ + const xmlChar *nameURI; /* the URI part of the name QName */ + const xmlChar *select; /* the eval string */ + xmlNodePtr tree; /* the sequence constructor if no eval + string or the location */ + xmlXPathObjectPtr value; /* The value if computed */ + xmlDocPtr fragment; /* The Result Tree Fragments (needed for XSLT 1.0) + which are bound to the variable's lifetime. */ + int level; /* the depth in the tree; + -1 if persistent (e.g. a given xsl:with-param) */ + xsltTransformContextPtr context; /* The transformation context; needed to cache + the variables */ + int flags; +}; + +#ifdef XSLT_REFACTORED + +struct _xsltPrincipalStylesheetData { + /* + * Namespace dictionary for ns-prefixes and ns-names: + * TODO: Shared between stylesheets, and XPath mechanisms. + * Not used yet. + */ + xmlDictPtr namespaceDict; + /* + * Global list of in-scope namespaces. + */ + xsltPointerListPtr inScopeNamespaces; + /* + * Global list of information for [xsl:]excluded-result-prefixes. + */ + xsltPointerListPtr exclResultNamespaces; + /* + * Global list of information for [xsl:]extension-element-prefixes. + */ + xsltPointerListPtr extElemNamespaces; + xsltEffectiveNsPtr effectiveNs; +#ifdef XSLT_REFACTORED_XSLT_NSCOMP + /* + * Namespace name map to get rid of string comparison of namespace names. + */ + xsltNsMapPtr nsMap; +#endif +}; + + +#endif +/* + * Note that we added a @compCtxt field to anchor an stylesheet compilation + * context, since, due to historical reasons, various compile-time function + * take only the stylesheet as argument and not a compilation context. + */ +struct _xsltStylesheet { + /* + * The stylesheet import relation is kept as a tree. 
+ */ + struct _xsltStylesheet *parent; + struct _xsltStylesheet *next; + struct _xsltStylesheet *imports; + + xsltDocumentPtr docList; /* the include document list */ + + /* + * General data on the style sheet document. + */ + xmlDocPtr doc; /* the parsed XML stylesheet */ + xmlHashTablePtr stripSpaces;/* the hash table of the strip-space and + preserve space elements */ + int stripAll; /* strip-space * (1) preserve-space * (-1) */ + xmlHashTablePtr cdataSection;/* the hash table of the cdata-section */ + + /* + * Global variable or parameters. + */ + xsltStackElemPtr variables; /* linked list of param and variables */ + + /* + * Template descriptions. + */ + xsltTemplatePtr templates; /* the ordered list of templates */ + xmlHashTablePtr templatesHash; /* hash table or wherever compiled + templates information is stored */ + struct _xsltCompMatch *rootMatch; /* template based on / */ + struct _xsltCompMatch *keyMatch; /* template based on key() */ + struct _xsltCompMatch *elemMatch; /* template based on * */ + struct _xsltCompMatch *attrMatch; /* template based on @* */ + struct _xsltCompMatch *parentMatch; /* template based on .. */ + struct _xsltCompMatch *textMatch; /* template based on text() */ + struct _xsltCompMatch *piMatch; /* template based on + processing-instruction() */ + struct _xsltCompMatch *commentMatch; /* template based on comment() */ + + /* + * Namespace aliases. + * NOTE: Not used in the refactored code. + */ + xmlHashTablePtr nsAliases; /* the namespace alias hash tables */ + + /* + * Attribute sets. + */ + xmlHashTablePtr attributeSets;/* the attribute sets hash tables */ + + /* + * Namespaces. + * TODO: Eliminate this. + */ + xmlHashTablePtr nsHash; /* the set of namespaces in use: + ATTENTION: This is used for + execution of XPath expressions; unfortunately + it restricts the stylesheet to have distinct + prefixes. + TODO: We need to get rid of this. + */ + void *nsDefs; /* ATTENTION TODO: This is currently used to store + xsltExtDefPtr (in extensions.c) and + *not* xmlNsPtr. + */ + + /* + * Key definitions. + */ + void *keys; /* key definitions */ + + /* + * Output related stuff. + */ + xmlChar *method; /* the output method */ + xmlChar *methodURI; /* associated namespace if any */ + xmlChar *version; /* version string */ + xmlChar *encoding; /* encoding string */ + int omitXmlDeclaration; /* omit-xml-declaration = "yes" | "no" */ + + /* + * Number formatting. + */ + xsltDecimalFormatPtr decimalFormat; + int standalone; /* standalone = "yes" | "no" */ + xmlChar *doctypePublic; /* doctype-public string */ + xmlChar *doctypeSystem; /* doctype-system string */ + int indent; /* should output being indented */ + xmlChar *mediaType; /* media-type string */ + + /* + * Precomputed blocks. + */ + xsltElemPreCompPtr preComps;/* list of precomputed blocks */ + int warnings; /* number of warnings found at compilation */ + int errors; /* number of errors found at compilation */ + + xmlChar *exclPrefix; /* last excluded prefixes */ + xmlChar **exclPrefixTab; /* array of excluded prefixes */ + int exclPrefixNr; /* number of excluded prefixes in scope */ + int exclPrefixMax; /* size of the array */ + + void *_private; /* user defined data */ + + /* + * Extensions. + */ + xmlHashTablePtr extInfos; /* the extension data */ + int extrasNr; /* the number of extras required */ + + /* + * For keeping track of nested includes + */ + xsltDocumentPtr includes; /* points to last nested include */ + + /* + * dictionary: shared between stylesheet, context and documents. 
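/*
 * A minimal sketch (not part of the vendored header) that reads a few of
 * the public xsltStylesheet fields shown above after parsing, and walks the
 * import tree (next siblings, imports children) described at the top of the
 * struct.  The helper name is hypothetical.
 */
#include <stdio.h>
#include <libxslt/xsltInternals.h>

static void
exampleDumpImportTree(xsltStylesheetPtr style, int depth)
{
    xsltStylesheetPtr cur;

    for (cur = style; cur != NULL; cur = cur->next) {
        printf("%*sstylesheet: method=%s encoding=%s indent=%d\n",
               depth * 2, "",
               cur->method   ? (const char *) cur->method   : "(default)",
               cur->encoding ? (const char *) cur->encoding : "(default)",
               cur->indent);
        exampleDumpImportTree(cur->imports, depth + 1);   /* descend into imports */
    }
}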
+ */ + xmlDictPtr dict; + /* + * precompiled attribute value templates. + */ + void *attVTs; + /* + * if namespace-alias has an alias for the default stylesheet prefix + * NOTE: Not used in the refactored code. + */ + const xmlChar *defaultAlias; + /* + * bypass pre-processing (already done) (used in imports) + */ + int nopreproc; + /* + * all document text strings were internalized + */ + int internalized; + /* + * Literal Result Element as Stylesheet c.f. section 2.3 + */ + int literal_result; + /* + * The principal stylesheet + */ + xsltStylesheetPtr principal; +#ifdef XSLT_REFACTORED + /* + * Compilation context used during compile-time. + */ + xsltCompilerCtxtPtr compCtxt; /* TODO: Change this to (void *). */ + + xsltPrincipalStylesheetDataPtr principalData; +#endif + /* + * Forwards-compatible processing + */ + int forwards_compatible; + + xmlHashTablePtr namedTemplates; /* hash table of named templates */ + + xmlXPathContextPtr xpathCtxt; + + unsigned long opLimit; + unsigned long opCount; +}; + +typedef struct _xsltTransformCache xsltTransformCache; +typedef xsltTransformCache *xsltTransformCachePtr; +struct _xsltTransformCache { + xmlDocPtr RVT; + int nbRVT; + xsltStackElemPtr stackItems; + int nbStackItems; +#ifdef XSLT_DEBUG_PROFILE_CACHE + int dbgCachedRVTs; + int dbgReusedRVTs; + int dbgCachedVars; + int dbgReusedVars; +#endif +}; + +/* + * The in-memory structure corresponding to an XSLT Transformation. + */ +typedef enum { + XSLT_OUTPUT_XML = 0, + XSLT_OUTPUT_HTML, + XSLT_OUTPUT_TEXT +} xsltOutputType; + +typedef void * +(*xsltNewLocaleFunc)(const xmlChar *lang, int lowerFirst); +typedef void +(*xsltFreeLocaleFunc)(void *locale); +typedef xmlChar * +(*xsltGenSortKeyFunc)(void *locale, const xmlChar *lang); + +typedef enum { + XSLT_STATE_OK = 0, + XSLT_STATE_ERROR, + XSLT_STATE_STOPPED +} xsltTransformState; + +struct _xsltTransformContext { + xsltStylesheetPtr style; /* the stylesheet used */ + xsltOutputType type; /* the type of output */ + + xsltTemplatePtr templ; /* the current template */ + int templNr; /* Nb of templates in the stack */ + int templMax; /* Size of the templtes stack */ + xsltTemplatePtr *templTab; /* the template stack */ + + xsltStackElemPtr vars; /* the current variable list */ + int varsNr; /* Nb of variable list in the stack */ + int varsMax; /* Size of the variable list stack */ + xsltStackElemPtr *varsTab; /* the variable list stack */ + int varsBase; /* the var base for current templ */ + + /* + * Extensions + */ + xmlHashTablePtr extFunctions; /* the extension functions */ + xmlHashTablePtr extElements; /* the extension elements */ + xmlHashTablePtr extInfos; /* the extension data */ + + const xmlChar *mode; /* the current mode */ + const xmlChar *modeURI; /* the current mode URI */ + + xsltDocumentPtr docList; /* the document list */ + + xsltDocumentPtr document; /* the current source document; can be NULL if an RTF */ + xmlNodePtr node; /* the current node being processed */ + xmlNodeSetPtr nodeList; /* the current node list */ + /* xmlNodePtr current; the node */ + + xmlDocPtr output; /* the resulting document */ + xmlNodePtr insert; /* the insertion node */ + + xmlXPathContextPtr xpathCtxt; /* the XPath context */ + xsltTransformState state; /* the current state */ + + /* + * Global variables + */ + xmlHashTablePtr globalVars; /* the global variables and params */ + + xmlNodePtr inst; /* the instruction in the stylesheet */ + + int xinclude; /* should XInclude be processed */ + + const char * outputFile; /* the output URI if known */ + + int 
profile; /* is this run profiled */ + long prof; /* the current profiled value */ + int profNr; /* Nb of templates in the stack */ + int profMax; /* Size of the templtaes stack */ + long *profTab; /* the profile template stack */ + + void *_private; /* user defined data */ + + int extrasNr; /* the number of extras used */ + int extrasMax; /* the number of extras allocated */ + xsltRuntimeExtraPtr extras; /* extra per runtime information */ + + xsltDocumentPtr styleList; /* the stylesheet docs list */ + void * sec; /* the security preferences if any */ + + xmlGenericErrorFunc error; /* a specific error handler */ + void * errctx; /* context for the error handler */ + + xsltSortFunc sortfunc; /* a ctxt specific sort routine */ + + /* + * handling of temporary Result Value Tree + * (XSLT 1.0 term: "Result Tree Fragment") + */ + xmlDocPtr tmpRVT; /* list of RVT without persistance */ + xmlDocPtr persistRVT; /* list of persistant RVTs */ + int ctxtflags; /* context processing flags */ + + /* + * Speed optimization when coalescing text nodes + */ + const xmlChar *lasttext; /* last text node content */ + int lasttsize; /* last text node size */ + int lasttuse; /* last text node use */ + /* + * Per Context Debugging + */ + int debugStatus; /* the context level debug status */ + unsigned long* traceCode; /* pointer to the variable holding the mask */ + + int parserOptions; /* parser options xmlParserOption */ + + /* + * dictionary: shared between stylesheet, context and documents. + */ + xmlDictPtr dict; + xmlDocPtr tmpDoc; /* Obsolete; not used in the library. */ + /* + * all document text strings are internalized + */ + int internalized; + int nbKeys; + int hasTemplKeyPatterns; + xsltTemplatePtr currentTemplateRule; /* the Current Template Rule */ + xmlNodePtr initialContextNode; + xmlDocPtr initialContextDoc; + xsltTransformCachePtr cache; + void *contextVariable; /* the current variable item */ + xmlDocPtr localRVT; /* list of local tree fragments; will be freed when + the instruction which created the fragment + exits */ + xmlDocPtr localRVTBase; /* Obsolete */ + int keyInitLevel; /* Needed to catch recursive keys issues */ + int depth; /* Needed to catch recursions */ + int maxTemplateDepth; + int maxTemplateVars; + unsigned long opLimit; + unsigned long opCount; + int sourceDocDirty; + unsigned long currentId; /* For generate-id() */ + + xsltNewLocaleFunc newLocale; + xsltFreeLocaleFunc freeLocale; + xsltGenSortKeyFunc genSortKey; +}; + +/** + * CHECK_STOPPED: + * + * Macro to check if the XSLT processing should be stopped. + * Will return from the function. + */ +#define CHECK_STOPPED if (ctxt->state == XSLT_STATE_STOPPED) return; + +/** + * CHECK_STOPPEDE: + * + * Macro to check if the XSLT processing should be stopped. + * Will goto the error: label. + */ +#define CHECK_STOPPEDE if (ctxt->state == XSLT_STATE_STOPPED) goto error; + +/** + * CHECK_STOPPED0: + * + * Macro to check if the XSLT processing should be stopped. + * Will return from the function with a 0 value. + */ +#define CHECK_STOPPED0 if (ctxt->state == XSLT_STATE_STOPPED) return(0); + +/* + * The macro XML_CAST_FPTR is a hack to avoid a gcc warning about + * possible incompatibilities between function pointers and object + * pointers. It is defined in libxml/hash.h within recent versions + * of libxml2, but is put here for compatibility. 
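/*
 * A minimal sketch (not part of the vendored header) showing how the
 * per-context limits above (maxTemplateDepth, maxTemplateVars, opLimit)
 * can be tightened before running a transformation through a caller-owned
 * context.  xsltNewTransformContext(), xsltApplyStylesheetUser() and
 * xsltFreeTransformContext() are declared in transform.h; the limit values
 * here are arbitrary.
 */
#include <libxslt/xsltInternals.h>
#include <libxslt/transform.h>

static xmlDocPtr
exampleApplyWithLimits(xsltStylesheetPtr style, xmlDocPtr doc)
{
    xmlDocPtr result;
    xsltTransformContextPtr ctxt = xsltNewTransformContext(style, doc);

    if (ctxt == NULL)
        return NULL;
    ctxt->maxTemplateDepth = 500;        /* cap template recursion */
    ctxt->maxTemplateVars  = 5000;       /* cap local variable frames */
    ctxt->opLimit          = 100000000;  /* abort runaway transformations */

    result = xsltApplyStylesheetUser(style, doc, NULL, NULL, NULL, ctxt);
    xsltFreeTransformContext(ctxt);      /* caller-owned contexts are freed by the caller */
    return result;
}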
+ */ +#ifndef XML_CAST_FPTR +/** + * XML_CAST_FPTR: + * @fptr: pointer to a function + * + * Macro to do a casting from an object pointer to a + * function pointer without encountering a warning from + * gcc + * + * #define XML_CAST_FPTR(fptr) (*(void **)(&fptr)) + * This macro violated ISO C aliasing rules (gcc4 on s390 broke) + * so it is disabled now + */ + +#define XML_CAST_FPTR(fptr) fptr +#endif +/* + * Functions associated to the internal types +xsltDecimalFormatPtr xsltDecimalFormatGetByName(xsltStylesheetPtr sheet, + xmlChar *name); + */ +XSLTPUBFUN xsltStylesheetPtr XSLTCALL + xsltNewStylesheet (void); +XSLTPUBFUN xsltStylesheetPtr XSLTCALL + xsltParseStylesheetFile (const xmlChar* filename); +XSLTPUBFUN void XSLTCALL + xsltFreeStylesheet (xsltStylesheetPtr style); +XSLTPUBFUN int XSLTCALL + xsltIsBlank (xmlChar *str); +XSLTPUBFUN void XSLTCALL + xsltFreeStackElemList (xsltStackElemPtr elem); +XSLTPUBFUN xsltDecimalFormatPtr XSLTCALL + xsltDecimalFormatGetByName(xsltStylesheetPtr style, + xmlChar *name); +XSLTPUBFUN xsltDecimalFormatPtr XSLTCALL + xsltDecimalFormatGetByQName(xsltStylesheetPtr style, + const xmlChar *nsUri, + const xmlChar *name); + +XSLTPUBFUN xsltStylesheetPtr XSLTCALL + xsltParseStylesheetProcess(xsltStylesheetPtr ret, + xmlDocPtr doc); +XSLTPUBFUN void XSLTCALL + xsltParseStylesheetOutput(xsltStylesheetPtr style, + xmlNodePtr cur); +XSLTPUBFUN xsltStylesheetPtr XSLTCALL + xsltParseStylesheetDoc (xmlDocPtr doc); +XSLTPUBFUN xsltStylesheetPtr XSLTCALL + xsltParseStylesheetImportedDoc(xmlDocPtr doc, + xsltStylesheetPtr style); +XSLTPUBFUN int XSLTCALL + xsltParseStylesheetUser(xsltStylesheetPtr style, + xmlDocPtr doc); +XSLTPUBFUN xsltStylesheetPtr XSLTCALL + xsltLoadStylesheetPI (xmlDocPtr doc); +XSLTPUBFUN void XSLTCALL + xsltNumberFormat (xsltTransformContextPtr ctxt, + xsltNumberDataPtr data, + xmlNodePtr node); +XSLTPUBFUN xmlXPathError XSLTCALL + xsltFormatNumberConversion(xsltDecimalFormatPtr self, + xmlChar *format, + double number, + xmlChar **result); + +XSLTPUBFUN void XSLTCALL + xsltParseTemplateContent(xsltStylesheetPtr style, + xmlNodePtr templ); +XSLTPUBFUN int XSLTCALL + xsltAllocateExtra (xsltStylesheetPtr style); +XSLTPUBFUN int XSLTCALL + xsltAllocateExtraCtxt (xsltTransformContextPtr ctxt); +/* + * Extra functions for Result Value Trees + */ +XSLTPUBFUN xmlDocPtr XSLTCALL + xsltCreateRVT (xsltTransformContextPtr ctxt); +XSLTPUBFUN int XSLTCALL + xsltRegisterTmpRVT (xsltTransformContextPtr ctxt, + xmlDocPtr RVT); +XSLTPUBFUN int XSLTCALL + xsltRegisterLocalRVT (xsltTransformContextPtr ctxt, + xmlDocPtr RVT); +XSLTPUBFUN int XSLTCALL + xsltRegisterPersistRVT (xsltTransformContextPtr ctxt, + xmlDocPtr RVT); +XSLTPUBFUN int XSLTCALL + xsltExtensionInstructionResultRegister( + xsltTransformContextPtr ctxt, + xmlXPathObjectPtr obj); +XSLTPUBFUN int XSLTCALL + xsltExtensionInstructionResultFinalize( + xsltTransformContextPtr ctxt); +XSLTPUBFUN int XSLTCALL + xsltFlagRVTs( + xsltTransformContextPtr ctxt, + xmlXPathObjectPtr obj, + int val); +XSLTPUBFUN void XSLTCALL + xsltFreeRVTs (xsltTransformContextPtr ctxt); +XSLTPUBFUN void XSLTCALL + xsltReleaseRVT (xsltTransformContextPtr ctxt, + xmlDocPtr RVT); +/* + * Extra functions for Attribute Value Templates + */ +XSLTPUBFUN void XSLTCALL + xsltCompileAttr (xsltStylesheetPtr style, + xmlAttrPtr attr); +XSLTPUBFUN xmlChar * XSLTCALL + xsltEvalAVT (xsltTransformContextPtr ctxt, + void *avt, + xmlNodePtr node); +XSLTPUBFUN void XSLTCALL + xsltFreeAVTList (void *avt); + +/* + * Extra function for successful 
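/*
 * A minimal end-to-end sketch (not part of the vendored header) pairing the
 * allocation and release entry points declared above.  xsltApplyStylesheet()
 * comes from transform.h and xsltSaveResultToFile() from xsltutils.h; the
 * file names are placeholders.
 */
#include <stdio.h>
#include <libxml/parser.h>
#include <libxslt/xslt.h>
#include <libxslt/xsltInternals.h>
#include <libxslt/transform.h>
#include <libxslt/xsltutils.h>

static int
exampleRunTransform(const char *styleFile, const char *docFile)
{
    xsltStylesheetPtr style = NULL;
    xmlDocPtr doc = NULL, result = NULL;
    int ret = -1;

    style = xsltParseStylesheetFile(BAD_CAST styleFile);
    if (style == NULL)
        goto done;
    doc = xmlReadFile(docFile, NULL, XSLT_PARSE_OPTIONS);
    if (doc == NULL)
        goto done;
    result = xsltApplyStylesheet(style, doc, NULL);
    if (result == NULL)
        goto done;
    xsltSaveResultToFile(stdout, result, style);   /* honours the xsl:output settings */
    ret = 0;

done:
    if (result != NULL) xmlFreeDoc(result);
    if (doc != NULL)    xmlFreeDoc(doc);
    if (style != NULL)  xsltFreeStylesheet(style);
    return ret;
}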
xsltCleanupGlobals / xsltInit sequence. + */ + +XSLTPUBFUN void XSLTCALL + xsltUninit (void); + +/************************************************************************ + * * + * Compile-time functions for *internal* use only * + * * + ************************************************************************/ + +#ifdef XSLT_REFACTORED +XSLTPUBFUN void XSLTCALL + xsltParseSequenceConstructor( + xsltCompilerCtxtPtr cctxt, + xmlNodePtr start); +XSLTPUBFUN int XSLTCALL + xsltParseAnyXSLTElem (xsltCompilerCtxtPtr cctxt, + xmlNodePtr elem); +#ifdef XSLT_REFACTORED_XSLT_NSCOMP +XSLTPUBFUN int XSLTCALL + xsltRestoreDocumentNamespaces( + xsltNsMapPtr ns, + xmlDocPtr doc); +#endif +#endif /* XSLT_REFACTORED */ + +/************************************************************************ + * * + * Transformation-time functions for *internal* use only * + * * + ************************************************************************/ +XSLTPUBFUN int XSLTCALL + xsltInitCtxtKey (xsltTransformContextPtr ctxt, + xsltDocumentPtr doc, + xsltKeyDefPtr keyd); +XSLTPUBFUN int XSLTCALL + xsltInitAllDocKeys (xsltTransformContextPtr ctxt); +#ifdef __cplusplus +} +#endif + +#endif /* __XML_XSLT_H__ */ + diff --git a/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/include/libxslt/xsltconfig.h b/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/include/libxslt/xsltconfig.h new file mode 100644 index 000000000..e05f25308 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/include/libxslt/xsltconfig.h @@ -0,0 +1,146 @@ +/* + * Summary: compile-time version information for the XSLT engine + * Description: compile-time version information for the XSLT engine + * this module is autogenerated. + * + * Copy: See Copyright for the status of this software. + * + * Author: Daniel Veillard + */ + +#ifndef __XML_XSLTCONFIG_H__ +#define __XML_XSLTCONFIG_H__ + +#ifdef __cplusplus +extern "C" { +#endif + +/** + * LIBXSLT_DOTTED_VERSION: + * + * the version string like "1.2.3" + */ +#define LIBXSLT_DOTTED_VERSION "1.1.43" + +/** + * LIBXSLT_VERSION: + * + * the version number: 1.2.3 value is 10203 + */ +#define LIBXSLT_VERSION 10143 + +/** + * LIBXSLT_VERSION_STRING: + * + * the version number string, 1.2.3 value is "10203" + */ +#define LIBXSLT_VERSION_STRING "10143" + +/** + * LIBXSLT_VERSION_EXTRA: + * + * extra version information, used to show a Git commit description + */ +#define LIBXSLT_VERSION_EXTRA "" + +/** + * WITH_XSLT_DEBUG: + * + * Activate the compilation of the debug reporting. Speed penalty + * is insignifiant and being able to run xsltpoc -v is useful. On + * by default unless --without-debug is passed to configure + */ +#if 1 +#define WITH_XSLT_DEBUG +#endif + +/** + * XSLT_NEED_TRIO: + * + * should be activated if the existing libc library lacks some of the + * string formatting function, in that case reuse the Trio ones already + * compiled in the libxml2 library. + */ + +#if 0 +#define XSLT_NEED_TRIO +#endif +#ifdef __VMS +#define HAVE_SYS_STAT_H 1 +#ifndef XSLT_NEED_TRIO +#define XSLT_NEED_TRIO +#endif +#endif + +#ifdef XSLT_NEED_TRIO +#define TRIO_REPLACE_STDIO +#endif + +/** + * WITH_XSLT_DEBUGGER: + * + * Activate the compilation of the debugger support. Speed penalty + * is insignifiant. + * On by default unless --without-debugger is passed to configure + */ +#if 0 +#ifndef WITH_DEBUGGER +#define WITH_DEBUGGER +#endif +#endif + +/** + * WITH_PROFILER: + * + * Activate the compilation of the profiler. 
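/*
 * A minimal sketch (not part of the vendored header): the macros above are
 * intended for compile-time feature gating, while the xsltLibxsltVersion
 * variable from xslt.h reports what the process is actually linked against,
 * so the two can be compared to detect a header/library mismatch.
 */
#include <stdio.h>
#include <libxslt/xslt.h>
#include <libxslt/xsltconfig.h>

#if LIBXSLT_VERSION < 10130
#error "this example assumes libxslt 1.1.30 or newer"
#endif

static int
exampleCheckVersionSkew(void)
{
    if (xsltLibxsltVersion != LIBXSLT_VERSION) {
        fprintf(stderr, "compiled against %s but running %d\n",
                LIBXSLT_DOTTED_VERSION, xsltLibxsltVersion);
        return -1;
    }
    return 0;
}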
Speed penalty + * is insignifiant. + * On by default unless --without-profiler is passed to configure + */ +#if 1 +#ifndef WITH_PROFILER +#define WITH_PROFILER +#endif +#endif + +/** + * WITH_MODULES: + * + * Whether module support is configured into libxslt + * Note: no default module path for win32 platforms + */ +#if 0 +#ifndef WITH_MODULES +#define WITH_MODULES +#endif +#define LIBXSLT_DEFAULT_PLUGINS_PATH() "" +#endif + +/** + * LIBXSLT_ATTR_FORMAT: + * + * This macro is used to indicate to GCC the parameters are printf-like + */ +#ifdef __GNUC__ +#define LIBXSLT_ATTR_FORMAT(fmt,args) __attribute__((__format__(__printf__,fmt,args))) +#else +#define LIBXSLT_ATTR_FORMAT(fmt,args) +#endif + +/** + * LIBXSLT_PUBLIC: + * + * This macro is used to declare PUBLIC variables for Cygwin and for MSC on Windows + */ +#if !defined LIBXSLT_PUBLIC +#if (defined(__CYGWIN__) || defined _MSC_VER) && !defined IN_LIBXSLT && !defined LIBXSLT_STATIC +#define LIBXSLT_PUBLIC __declspec(dllimport) +#else +#define LIBXSLT_PUBLIC +#endif +#endif + +#ifdef __cplusplus +} +#endif + +#endif /* __XML_XSLTCONFIG_H__ */ diff --git a/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/include/libxslt/xsltexports.h b/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/include/libxslt/xsltexports.h new file mode 100644 index 000000000..95c352fee --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/include/libxslt/xsltexports.h @@ -0,0 +1,64 @@ +/* + * Summary: macros for marking symbols as exportable/importable. + * Description: macros for marking symbols as exportable/importable. + * + * Copy: See Copyright for the status of this software. + */ + +#ifndef __XSLT_EXPORTS_H__ +#define __XSLT_EXPORTS_H__ + +#if defined(_WIN32) || defined(__CYGWIN__) +/** DOC_DISABLE */ + +#ifdef LIBXSLT_STATIC + #define XSLTPUBLIC +#elif defined(IN_LIBXSLT) + #define XSLTPUBLIC __declspec(dllexport) +#else + #define XSLTPUBLIC __declspec(dllimport) +#endif + +#define XSLTCALL __cdecl + +/** DOC_ENABLE */ +#else /* not Windows */ + +/** + * XSLTPUBLIC: + * + * Macro which declares a public symbol + */ +#define XSLTPUBLIC + +/** + * XSLTCALL: + * + * Macro which declares the calling convention for exported functions + */ +#define XSLTCALL + +#endif /* platform switch */ + +/* + * XSLTPUBFUN: + * + * Macro which declares an exportable function + */ +#define XSLTPUBFUN XSLTPUBLIC + +/** + * XSLTPUBVAR: + * + * Macro which declares an exportable variable + */ +#define XSLTPUBVAR XSLTPUBLIC extern + +/* Compatibility */ +#if !defined(LIBXSLT_PUBLIC) +#define LIBXSLT_PUBLIC XSLTPUBVAR +#endif + +#endif /* __XSLT_EXPORTS_H__ */ + + diff --git a/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/include/libxslt/xsltlocale.h b/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/include/libxslt/xsltlocale.h new file mode 100644 index 000000000..c8be58d3d --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/include/libxslt/xsltlocale.h @@ -0,0 +1,44 @@ +/* + * Summary: Locale handling + * Description: Interfaces for locale handling. Needed for language dependent + * sorting. + * + * Copy: See Copyright for the status of this software. 
+ * + * Author: Nick Wellnhofer + */ + +#ifndef __XML_XSLTLOCALE_H__ +#define __XML_XSLTLOCALE_H__ + +#include +#include "xsltexports.h" + +#ifdef __cplusplus +extern "C" { +#endif + +XSLTPUBFUN void * XSLTCALL + xsltNewLocale (const xmlChar *langName, + int lowerFirst); +XSLTPUBFUN void XSLTCALL + xsltFreeLocale (void *locale); +XSLTPUBFUN xmlChar * XSLTCALL + xsltStrxfrm (void *locale, + const xmlChar *string); +XSLTPUBFUN void XSLTCALL + xsltFreeLocales (void); + +/* Backward compatibility */ +typedef void *xsltLocale; +typedef xmlChar xsltLocaleChar; +XSLTPUBFUN int XSLTCALL + xsltLocaleStrcmp (void *locale, + const xmlChar *str1, + const xmlChar *str2); + +#ifdef __cplusplus +} +#endif + +#endif /* __XML_XSLTLOCALE_H__ */ diff --git a/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/include/libxslt/xsltutils.h b/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/include/libxslt/xsltutils.h new file mode 100644 index 000000000..2514774b3 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/include/libxslt/xsltutils.h @@ -0,0 +1,343 @@ +/* + * Summary: set of utilities for the XSLT engine + * Description: interfaces for the utilities module of the XSLT engine. + * things like message handling, profiling, and other + * generally useful routines. + * + * Copy: See Copyright for the status of this software. + * + * Author: Daniel Veillard + */ + +#ifndef __XML_XSLTUTILS_H__ +#define __XML_XSLTUTILS_H__ + +#include +#include +#include +#include +#include "xsltexports.h" +#include "xsltInternals.h" + +#ifdef __cplusplus +extern "C" { +#endif + +/** + * XSLT_TODO: + * + * Macro to flag unimplemented blocks. + */ +#define XSLT_TODO \ + xsltGenericError(xsltGenericErrorContext, \ + "Unimplemented block at %s:%d\n", \ + __FILE__, __LINE__); + +/** + * XSLT_STRANGE: + * + * Macro to flag that a problem was detected internally. + */ +#define XSLT_STRANGE \ + xsltGenericError(xsltGenericErrorContext, \ + "Internal error at %s:%d\n", \ + __FILE__, __LINE__); + +/** + * IS_XSLT_ELEM: + * + * Checks that the element pertains to XSLT namespace. + */ +#define IS_XSLT_ELEM(n) \ + (((n) != NULL) && ((n)->type == XML_ELEMENT_NODE) && \ + ((n)->ns != NULL) && (xmlStrEqual((n)->ns->href, XSLT_NAMESPACE))) + +/** + * IS_XSLT_NAME: + * + * Checks the value of an element in XSLT namespace. + */ +#define IS_XSLT_NAME(n, val) \ + (xmlStrEqual((n)->name, (const xmlChar *) (val))) + +/** + * IS_XSLT_REAL_NODE: + * + * Check that a node is a 'real' one: document, element, text or attribute. + */ +#define IS_XSLT_REAL_NODE(n) \ + (((n) != NULL) && \ + (((n)->type == XML_ELEMENT_NODE) || \ + ((n)->type == XML_TEXT_NODE) || \ + ((n)->type == XML_CDATA_SECTION_NODE) || \ + ((n)->type == XML_ATTRIBUTE_NODE) || \ + ((n)->type == XML_DOCUMENT_NODE) || \ + ((n)->type == XML_HTML_DOCUMENT_NODE) || \ + ((n)->type == XML_COMMENT_NODE) || \ + ((n)->type == XML_PI_NODE))) + +/* + * Our own version of namespaced attributes lookup. 
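+ *
+ * Illustrative note, not part of the upstream header: xsltGetNsProp()
+ * returns a freshly allocated copy which the caller is expected to free,
+ * e.g. (node and nsURI are placeholders)
+ *
+ *   xmlChar *val = xsltGetNsProp(node, (const xmlChar *)"name", nsURI);
+ *   if (val != NULL) { ... xmlFree(val); }
+ *
+ * whereas xsltGetCNsProp() returns a stylesheet-owned, dictionary-interned
+ * string (hence its const return type).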
+ */ +XSLTPUBFUN xmlChar * XSLTCALL + xsltGetNsProp (xmlNodePtr node, + const xmlChar *name, + const xmlChar *nameSpace); +XSLTPUBFUN const xmlChar * XSLTCALL + xsltGetCNsProp (xsltStylesheetPtr style, + xmlNodePtr node, + const xmlChar *name, + const xmlChar *nameSpace); +XSLTPUBFUN int XSLTCALL + xsltGetUTF8Char (const unsigned char *utf, + int *len); +#ifdef IN_LIBXSLT +/** DOC_DISABLE */ +XSLTPUBFUN int XSLTCALL + xsltGetUTF8CharZ (const unsigned char *utf, + int *len); +/** DOC_ENABLE */ +#endif + +/* + * XSLT Debug Tracing Tracing Types + */ +typedef enum { + XSLT_TRACE_ALL = -1, + XSLT_TRACE_NONE = 0, + XSLT_TRACE_COPY_TEXT = 1<<0, + XSLT_TRACE_PROCESS_NODE = 1<<1, + XSLT_TRACE_APPLY_TEMPLATE = 1<<2, + XSLT_TRACE_COPY = 1<<3, + XSLT_TRACE_COMMENT = 1<<4, + XSLT_TRACE_PI = 1<<5, + XSLT_TRACE_COPY_OF = 1<<6, + XSLT_TRACE_VALUE_OF = 1<<7, + XSLT_TRACE_CALL_TEMPLATE = 1<<8, + XSLT_TRACE_APPLY_TEMPLATES = 1<<9, + XSLT_TRACE_CHOOSE = 1<<10, + XSLT_TRACE_IF = 1<<11, + XSLT_TRACE_FOR_EACH = 1<<12, + XSLT_TRACE_STRIP_SPACES = 1<<13, + XSLT_TRACE_TEMPLATES = 1<<14, + XSLT_TRACE_KEYS = 1<<15, + XSLT_TRACE_VARIABLES = 1<<16 +} xsltDebugTraceCodes; + +/** + * XSLT_TRACE: + * + * Control the type of xsl debugtrace messages emitted. + */ +#define XSLT_TRACE(ctxt,code,call) \ + if (ctxt->traceCode && (*(ctxt->traceCode) & code)) \ + call + +XSLTPUBFUN void XSLTCALL + xsltDebugSetDefaultTrace(xsltDebugTraceCodes val); +XSLTPUBFUN xsltDebugTraceCodes XSLTCALL + xsltDebugGetDefaultTrace(void); + +/* + * XSLT specific error and debug reporting functions. + */ +XSLTPUBVAR xmlGenericErrorFunc xsltGenericError; +XSLTPUBVAR void *xsltGenericErrorContext; +XSLTPUBVAR xmlGenericErrorFunc xsltGenericDebug; +XSLTPUBVAR void *xsltGenericDebugContext; + +XSLTPUBFUN void XSLTCALL + xsltPrintErrorContext (xsltTransformContextPtr ctxt, + xsltStylesheetPtr style, + xmlNodePtr node); +XSLTPUBFUN void XSLTCALL + xsltMessage (xsltTransformContextPtr ctxt, + xmlNodePtr node, + xmlNodePtr inst); +XSLTPUBFUN void XSLTCALL + xsltSetGenericErrorFunc (void *ctx, + xmlGenericErrorFunc handler); +XSLTPUBFUN void XSLTCALL + xsltSetGenericDebugFunc (void *ctx, + xmlGenericErrorFunc handler); +XSLTPUBFUN void XSLTCALL + xsltSetTransformErrorFunc (xsltTransformContextPtr ctxt, + void *ctx, + xmlGenericErrorFunc handler); +XSLTPUBFUN void XSLTCALL + xsltTransformError (xsltTransformContextPtr ctxt, + xsltStylesheetPtr style, + xmlNodePtr node, + const char *msg, + ...) LIBXSLT_ATTR_FORMAT(4,5); + +XSLTPUBFUN int XSLTCALL + xsltSetCtxtParseOptions (xsltTransformContextPtr ctxt, + int options); +/* + * Sorting. + */ + +XSLTPUBFUN void XSLTCALL + xsltDocumentSortFunction (xmlNodeSetPtr list); +XSLTPUBFUN void XSLTCALL + xsltSetSortFunc (xsltSortFunc handler); +XSLTPUBFUN void XSLTCALL + xsltSetCtxtSortFunc (xsltTransformContextPtr ctxt, + xsltSortFunc handler); +XSLTPUBFUN void XSLTCALL + xsltSetCtxtLocaleHandlers (xsltTransformContextPtr ctxt, + xsltNewLocaleFunc newLocale, + xsltFreeLocaleFunc freeLocale, + xsltGenSortKeyFunc genSortKey); +XSLTPUBFUN void XSLTCALL + xsltDefaultSortFunction (xsltTransformContextPtr ctxt, + xmlNodePtr *sorts, + int nbsorts); +XSLTPUBFUN void XSLTCALL + xsltDoSortFunction (xsltTransformContextPtr ctxt, + xmlNodePtr * sorts, + int nbsorts); +XSLTPUBFUN xmlXPathObjectPtr * XSLTCALL + xsltComputeSortResult (xsltTransformContextPtr ctxt, + xmlNodePtr sort); + +/* + * QNames handling. 
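+ *
+ * Illustrative sketch, not part of the upstream header (assumes the
+ * stylesheet's dictionary is available as style->dict):
+ *
+ *   const xmlChar *prefix = NULL;
+ *   const xmlChar *local = xsltSplitQName(style->dict, name, &prefix);
+ *
+ * The returned strings are interned in the dictionary and should not be freed.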
+ */ + +XSLTPUBFUN const xmlChar * XSLTCALL + xsltSplitQName (xmlDictPtr dict, + const xmlChar *name, + const xmlChar **prefix); +XSLTPUBFUN const xmlChar * XSLTCALL + xsltGetQNameURI (xmlNodePtr node, + xmlChar **name); + +XSLTPUBFUN const xmlChar * XSLTCALL + xsltGetQNameURI2 (xsltStylesheetPtr style, + xmlNodePtr node, + const xmlChar **name); + +/* + * Output, reuse libxml I/O buffers. + */ +XSLTPUBFUN int XSLTCALL + xsltSaveResultTo (xmlOutputBufferPtr buf, + xmlDocPtr result, + xsltStylesheetPtr style); +XSLTPUBFUN int XSLTCALL + xsltSaveResultToFilename (const char *URI, + xmlDocPtr result, + xsltStylesheetPtr style, + int compression); +XSLTPUBFUN int XSLTCALL + xsltSaveResultToFile (FILE *file, + xmlDocPtr result, + xsltStylesheetPtr style); +XSLTPUBFUN int XSLTCALL + xsltSaveResultToFd (int fd, + xmlDocPtr result, + xsltStylesheetPtr style); +XSLTPUBFUN int XSLTCALL + xsltSaveResultToString (xmlChar **doc_txt_ptr, + int * doc_txt_len, + xmlDocPtr result, + xsltStylesheetPtr style); + +/* + * XPath interface + */ +XSLTPUBFUN xmlXPathCompExprPtr XSLTCALL + xsltXPathCompile (xsltStylesheetPtr style, + const xmlChar *str); +XSLTPUBFUN xmlXPathCompExprPtr XSLTCALL + xsltXPathCompileFlags (xsltStylesheetPtr style, + const xmlChar *str, + int flags); + +#ifdef IN_LIBXSLT +/** DOC_DISABLE */ +#define XSLT_SOURCE_NODE_MASK 15u +#define XSLT_SOURCE_NODE_HAS_KEY 1u +#define XSLT_SOURCE_NODE_HAS_ID 2u +int +xsltGetSourceNodeFlags(xmlNodePtr node); +int +xsltSetSourceNodeFlags(xsltTransformContextPtr ctxt, xmlNodePtr node, + int flags); +int +xsltClearSourceNodeFlags(xmlNodePtr node, int flags); +void ** +xsltGetPSVIPtr(xmlNodePtr cur); +/** DOC_ENABLE */ +#endif + +#ifdef WITH_PROFILER +/* + * Profiling. + */ +XSLTPUBFUN void XSLTCALL + xsltSaveProfiling (xsltTransformContextPtr ctxt, + FILE *output); +XSLTPUBFUN xmlDocPtr XSLTCALL + xsltGetProfileInformation (xsltTransformContextPtr ctxt); + +XSLTPUBFUN long XSLTCALL + xsltTimestamp (void); +XSLTPUBFUN void XSLTCALL + xsltCalibrateAdjust (long delta); +#endif + +/** + * XSLT_TIMESTAMP_TICS_PER_SEC: + * + * Sampling precision for profiling + */ +#define XSLT_TIMESTAMP_TICS_PER_SEC 100000l + +/* + * Hooks for the debugger. 
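+ *
+ * Illustrative sketch, not part of the upstream header: a debugger front-end
+ * typically hands its three callbacks to libxslt as an array, in the order
+ * handler, add-call, drop-call:
+ *
+ *   void *callbacks[3] = { (void *) my_handler, (void *) my_add_call, (void *) my_drop_call };
+ *   xsltSetDebuggerCallbacks(3, (void *) callbacks);
+ *
+ * (my_handler, my_add_call and my_drop_call are hypothetical functions
+ * matching the callback typedefs declared below.)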
+ */ + +typedef enum { + XSLT_DEBUG_NONE = 0, /* no debugging allowed */ + XSLT_DEBUG_INIT, + XSLT_DEBUG_STEP, + XSLT_DEBUG_STEPOUT, + XSLT_DEBUG_NEXT, + XSLT_DEBUG_STOP, + XSLT_DEBUG_CONT, + XSLT_DEBUG_RUN, + XSLT_DEBUG_RUN_RESTART, + XSLT_DEBUG_QUIT +} xsltDebugStatusCodes; + +XSLTPUBVAR int xslDebugStatus; + +typedef void (*xsltHandleDebuggerCallback) (xmlNodePtr cur, xmlNodePtr node, + xsltTemplatePtr templ, xsltTransformContextPtr ctxt); +typedef int (*xsltAddCallCallback) (xsltTemplatePtr templ, xmlNodePtr source); +typedef void (*xsltDropCallCallback) (void); + +XSLTPUBFUN int XSLTCALL + xsltGetDebuggerStatus (void); +#ifdef WITH_DEBUGGER +XSLTPUBFUN void XSLTCALL + xsltSetDebuggerStatus (int value); +XSLTPUBFUN int XSLTCALL + xsltSetDebuggerCallbacks (int no, void *block); +XSLTPUBFUN int XSLTCALL + xslAddCall (xsltTemplatePtr templ, + xmlNodePtr source); +XSLTPUBFUN void XSLTCALL + xslDropCall (void); +#endif + +#ifdef __cplusplus +} +#endif + +#endif /* __XML_XSLTUTILS_H__ */ + + diff --git a/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/libxml2_polyfill.c b/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/libxml2_polyfill.c new file mode 100644 index 000000000..750b1b52a --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/libxml2_polyfill.c @@ -0,0 +1,114 @@ +#include + +#ifndef HAVE_XMLCTXTSETOPTIONS +/* based on libxml2-2.14.0-dev (1d8bd126) parser.c xmlCtxtSetInternalOptions */ +int +xmlCtxtSetOptions(xmlParserCtxtPtr ctxt, int options) +{ + int keepMask = 0; + int allMask; + + if (ctxt == NULL) { + return (-1); + } + + /* + * XInclude options aren't handled by the parser. + * + * XML_PARSE_XINCLUDE + * XML_PARSE_NOXINCNODE + * XML_PARSE_NOBASEFIX + */ + allMask = XML_PARSE_RECOVER | + XML_PARSE_NOENT | + XML_PARSE_DTDLOAD | + XML_PARSE_DTDATTR | + XML_PARSE_DTDVALID | + XML_PARSE_NOERROR | + XML_PARSE_NOWARNING | + XML_PARSE_PEDANTIC | + XML_PARSE_NOBLANKS | +#ifdef LIBXML_SAX1_ENABLED + XML_PARSE_SAX1 | +#endif + XML_PARSE_NONET | + XML_PARSE_NODICT | + XML_PARSE_NSCLEAN | + XML_PARSE_NOCDATA | + XML_PARSE_COMPACT | + XML_PARSE_OLD10 | + XML_PARSE_HUGE | + XML_PARSE_OLDSAX | + XML_PARSE_IGNORE_ENC | + XML_PARSE_BIG_LINES; + + ctxt->options = (ctxt->options & keepMask) | (options & allMask); + + /* + * For some options, struct members are historically the source + * of truth. The values are initalized from global variables and + * old code could also modify them directly. Several older API + * functions that don't take an options argument rely on these + * deprecated mechanisms. + * + * Once public access to struct members and the globals are + * disabled, we can use the options bitmask as source of + * truth, making all these struct members obsolete. + * + * The XML_DETECT_IDS flags is misnamed. It simply enables + * loading of the external subset. + */ + ctxt->recovery = (options & XML_PARSE_RECOVER) ? 1 : 0; + ctxt->replaceEntities = (options & XML_PARSE_NOENT) ? 1 : 0; + ctxt->loadsubset = (options & XML_PARSE_DTDLOAD) ? XML_DETECT_IDS : 0; + ctxt->loadsubset |= (options & XML_PARSE_DTDATTR) ? XML_COMPLETE_ATTRS : 0; + ctxt->validate = (options & XML_PARSE_DTDVALID) ? 1 : 0; + ctxt->pedantic = (options & XML_PARSE_PEDANTIC) ? 1 : 0; + ctxt->keepBlanks = (options & XML_PARSE_NOBLANKS) ? 0 : 1; + ctxt->dictNames = (options & XML_PARSE_NODICT) ? 0 : 1; + + /* + * Changing SAX callbacks is a bad idea. This should be fixed. 
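+ *
+ * (Note added for clarity: below, XML_PARSE_NOBLANKS points the
+ * ignorableWhitespace callback at xmlSAX2IgnorableWhitespace,
+ * XML_PARSE_NOCDATA clears cdataBlock so CDATA content is reported through
+ * the regular characters callback, and XML_PARSE_HUGE lifts the dictionary
+ * size limit via xmlDictSetLimit(ctxt->dict, 0).)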
+ */ + if (options & XML_PARSE_NOBLANKS) { + ctxt->sax->ignorableWhitespace = xmlSAX2IgnorableWhitespace; + } + if (options & XML_PARSE_NOCDATA) { + ctxt->sax->cdataBlock = NULL; + } + if (options & XML_PARSE_HUGE) { + if (ctxt->dict != NULL) { + xmlDictSetLimit(ctxt->dict, 0); + } + } + + ctxt->linenumbers = 1; + + return (options & ~allMask); +} +#endif + +#ifndef HAVE_XMLCTXTGETOPTIONS +int +xmlCtxtGetOptions(xmlParserCtxtPtr ctxt) +{ + return (ctxt->options); +} +#endif + +#ifndef HAVE_XMLSWITCHENCODINGNAME +int +xmlSwitchEncodingName(xmlParserCtxtPtr ctxt, const char *encoding) +{ + if (ctxt == NULL) { + return (-1); + } + + xmlCharEncodingHandlerPtr handler = xmlFindCharEncodingHandler(encoding); + if (handler == NULL) { + return (-1); + } + + return (xmlSwitchToEncoding(ctxt, handler)); +} +#endif diff --git a/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/nokogiri.c b/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/nokogiri.c new file mode 100644 index 000000000..a43813b9c --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/nokogiri.c @@ -0,0 +1,294 @@ +#include + +VALUE mNokogiri ; +VALUE mNokogiriGumbo ; +VALUE mNokogiriHtml4 ; +VALUE mNokogiriHtml4Sax ; +VALUE mNokogiriHtml5 ; +VALUE mNokogiriXml ; +VALUE mNokogiriXmlSax ; +VALUE mNokogiriXmlXpath ; +VALUE mNokogiriXslt ; + +VALUE cNokogiriSyntaxError; +VALUE cNokogiriXmlCharacterData; +VALUE cNokogiriXmlElement; +VALUE cNokogiriXmlXpathSyntaxError; + +void noko_init_xml_attr(void); +void noko_init_xml_attribute_decl(void); +void noko_init_xml_cdata(void); +void noko_init_xml_comment(void); +void noko_init_xml_document(void); +void noko_init_xml_document_fragment(void); +void noko_init_xml_dtd(void); +void noko_init_xml_element_content(void); +void noko_init_xml_element_decl(void); +void noko_init_xml_encoding_handler(void); +void noko_init_xml_entity_decl(void); +void noko_init_xml_entity_reference(void); +void noko_init_xml_namespace(void); +void noko_init_xml_node(void); +void noko_init_xml_node_set(void); +void noko_init_xml_processing_instruction(void); +void noko_init_xml_reader(void); +void noko_init_xml_relax_ng(void); +void noko_init_xml_sax_parser(void); +void noko_init_xml_sax_parser_context(void); +void noko_init_xml_sax_push_parser(void); +void noko_init_xml_schema(void); +void noko_init_xml_syntax_error(void); +void noko_init_xml_text(void); +void noko_init_xml_xpath_context(void); +void noko_init_xslt_stylesheet(void); +void noko_init_html_document(void); +void noko_init_html_element_description(void); +void noko_init_html_entity_lookup(void); +void noko_init_html_sax_parser_context(void); +void noko_init_html_sax_push_parser(void); +void noko_init_html4_sax_parser(void); +void noko_init_gumbo(void); +void noko_init_test_global_handlers(void); + +static ID id_read, id_write, id_external_encoding; + + +static VALUE +noko_io_read_check(VALUE val) +{ + VALUE *args = (VALUE *)val; + return rb_funcall(args[0], id_read, 1, args[1]); +} + + +static VALUE +noko_io_read_failed(VALUE arg, VALUE exc) +{ + return Qundef; +} + + +int +noko_io_read(void *io, char *c_buffer, int c_buffer_len) +{ + VALUE rb_io = (VALUE)io; + VALUE rb_read_string, rb_args[2]; + size_t n_bytes_read, safe_len; + + rb_args[0] = rb_io; + rb_args[1] = INT2NUM(c_buffer_len); + + rb_read_string = rb_rescue(noko_io_read_check, (VALUE)rb_args, noko_io_read_failed, 0); + + if (NIL_P(rb_read_string)) { return 0; } + if (rb_read_string == Qundef) { return -1; 
} + if (TYPE(rb_read_string) != T_STRING) { return -1; } + + n_bytes_read = (size_t)RSTRING_LEN(rb_read_string); + safe_len = (n_bytes_read > (size_t)c_buffer_len) ? (size_t)c_buffer_len : n_bytes_read; + memcpy(c_buffer, StringValuePtr(rb_read_string), safe_len); + + return (int)safe_len; +} + + +static VALUE +noko_io_write_check(VALUE rb_args) +{ + VALUE rb_io = ((VALUE *)rb_args)[0]; + VALUE rb_output = ((VALUE *)rb_args)[1]; + return rb_funcall(rb_io, id_write, 1, rb_output); +} + + +static VALUE +noko_io_write_failed(VALUE arg, VALUE exc) +{ + return Qundef; +} + + +int +noko_io_write(void *io, char *c_buffer, int c_buffer_len) +{ + VALUE rb_args[2], rb_n_bytes_written; + VALUE rb_io = (VALUE)io; + VALUE rb_enc = Qnil; + rb_encoding *io_encoding; + + if (rb_respond_to(rb_io, id_external_encoding)) { + rb_enc = rb_funcall(rb_io, id_external_encoding, 0); + } + io_encoding = RB_NIL_P(rb_enc) ? rb_ascii8bit_encoding() : rb_to_encoding(rb_enc); + + rb_args[0] = rb_io; + rb_args[1] = rb_enc_str_new(c_buffer, (long)c_buffer_len, io_encoding); + + rb_n_bytes_written = rb_rescue(noko_io_write_check, (VALUE)rb_args, noko_io_write_failed, 0); + if (rb_n_bytes_written == Qundef) { return -1; } + + return NUM2INT(rb_n_bytes_written); +} + + +int +noko_io_close(void *io) +{ + return 0; +} + + +#if defined(_WIN32) && !defined(NOKOGIRI_PACKAGED_LIBRARIES) +# define NOKOGIRI_WINDOWS_DLLS 1 +#else +# define NOKOGIRI_WINDOWS_DLLS 0 +#endif + +// +// | dlls || true | false | +// | nlmm || | | +// |-----------++---------+---------| +// | NULL || default | ruby | +// | "random" || default | ruby | +// | "ruby" || ruby | ruby | +// | "default" || default | default | +// +// We choose *not* to use Ruby's memory management functions with windows DLLs because of this +// issue: https://github.com/sparklemotion/nokogiri/issues/2241 +// +static void +set_libxml_memory_management(void) +{ + const char *nlmm = getenv("NOKOGIRI_LIBXML_MEMORY_MANAGEMENT"); + if (nlmm) { + if (strcmp(nlmm, "default") == 0) { + goto libxml_uses_default_memory_management; + } else if (strcmp(nlmm, "ruby") == 0) { + goto libxml_uses_ruby_memory_management; + } + } + if (NOKOGIRI_WINDOWS_DLLS) { +libxml_uses_default_memory_management: + rb_const_set(mNokogiri, rb_intern("LIBXML_MEMORY_MANAGEMENT"), NOKOGIRI_STR_NEW2("default")); + return; + } else { +libxml_uses_ruby_memory_management: + rb_const_set(mNokogiri, rb_intern("LIBXML_MEMORY_MANAGEMENT"), NOKOGIRI_STR_NEW2("ruby")); + xmlMemSetup((xmlFreeFunc)ruby_xfree, (xmlMallocFunc)ruby_xmalloc, (xmlReallocFunc)ruby_xrealloc, ruby_strdup); + return; + } +} + + +void +Init_nokogiri(void) +{ + mNokogiri = rb_define_module("Nokogiri"); + mNokogiriGumbo = rb_define_module_under(mNokogiri, "Gumbo"); + mNokogiriHtml4 = rb_define_module_under(mNokogiri, "HTML4"); + mNokogiriHtml4Sax = rb_define_module_under(mNokogiriHtml4, "SAX"); + mNokogiriHtml5 = rb_define_module_under(mNokogiri, "HTML5"); + mNokogiriXml = rb_define_module_under(mNokogiri, "XML"); + mNokogiriXmlSax = rb_define_module_under(mNokogiriXml, "SAX"); + mNokogiriXmlXpath = rb_define_module_under(mNokogiriXml, "XPath"); + mNokogiriXslt = rb_define_module_under(mNokogiri, "XSLT"); + + set_libxml_memory_management(); /* must be before any function calls that might invoke xmlInitParser() */ + xmlInitParser(); + exsltRegisterAll(); + + rb_const_set(mNokogiri, rb_intern("LIBXML_COMPILED_VERSION"), NOKOGIRI_STR_NEW2(LIBXML_DOTTED_VERSION)); + rb_const_set(mNokogiri, rb_intern("LIBXML_LOADED_VERSION"), NOKOGIRI_STR_NEW2(xmlParserVersion)); 
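+  /* Added note: the *_COMPILED_VERSION constants record the headers Nokogiri was
+   * built against, while the *_LOADED_VERSION constants reflect the library
+   * actually loaded at runtime; comparing them lets Ruby code detect version
+   * skew. The same pairing is repeated for libxslt just below. */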
+ + rb_const_set(mNokogiri, rb_intern("LIBXSLT_COMPILED_VERSION"), NOKOGIRI_STR_NEW2(LIBXSLT_DOTTED_VERSION)); + rb_const_set(mNokogiri, rb_intern("LIBXSLT_LOADED_VERSION"), NOKOGIRI_STR_NEW2(xsltEngineVersion)); + + rb_const_set(mNokogiri, rb_intern("LIBXML_ZLIB_ENABLED"), + xmlHasFeature(XML_WITH_ZLIB) == 1 ? Qtrue : Qfalse); + +#ifdef NOKOGIRI_PACKAGED_LIBRARIES + rb_const_set(mNokogiri, rb_intern("PACKAGED_LIBRARIES"), Qtrue); +# ifdef NOKOGIRI_PRECOMPILED_LIBRARIES + rb_const_set(mNokogiri, rb_intern("PRECOMPILED_LIBRARIES"), Qtrue); +# else + rb_const_set(mNokogiri, rb_intern("PRECOMPILED_LIBRARIES"), Qfalse); +# endif + rb_const_set(mNokogiri, rb_intern("LIBXML2_PATCHES"), rb_str_split(NOKOGIRI_STR_NEW2(NOKOGIRI_LIBXML2_PATCHES), " ")); + rb_const_set(mNokogiri, rb_intern("LIBXSLT_PATCHES"), rb_str_split(NOKOGIRI_STR_NEW2(NOKOGIRI_LIBXSLT_PATCHES), " ")); +#else + rb_const_set(mNokogiri, rb_intern("PACKAGED_LIBRARIES"), Qfalse); + rb_const_set(mNokogiri, rb_intern("PRECOMPILED_LIBRARIES"), Qfalse); + rb_const_set(mNokogiri, rb_intern("LIBXML2_PATCHES"), Qnil); + rb_const_set(mNokogiri, rb_intern("LIBXSLT_PATCHES"), Qnil); +#endif + +#ifdef LIBXML_ICONV_ENABLED + rb_const_set(mNokogiri, rb_intern("LIBXML_ICONV_ENABLED"), Qtrue); +#else + rb_const_set(mNokogiri, rb_intern("LIBXML_ICONV_ENABLED"), Qfalse); +#endif + +#ifdef NOKOGIRI_OTHER_LIBRARY_VERSIONS + rb_const_set(mNokogiri, rb_intern("OTHER_LIBRARY_VERSIONS"), NOKOGIRI_STR_NEW2(NOKOGIRI_OTHER_LIBRARY_VERSIONS)); +#endif + + if (xsltExtModuleFunctionLookup((const xmlChar *)"date-time", EXSLT_DATE_NAMESPACE)) { + rb_const_set(mNokogiri, rb_intern("LIBXSLT_DATETIME_ENABLED"), Qtrue); + } else { + rb_const_set(mNokogiri, rb_intern("LIBXSLT_DATETIME_ENABLED"), Qfalse); + } + + cNokogiriSyntaxError = rb_define_class_under(mNokogiri, "SyntaxError", rb_eStandardError); + noko_init_xml_syntax_error(); + assert(cNokogiriXmlSyntaxError); + cNokogiriXmlXpathSyntaxError = rb_define_class_under(mNokogiriXmlXpath, "SyntaxError", cNokogiriXmlSyntaxError); + + noko_init_xml_element_content(); + noko_init_xml_encoding_handler(); + noko_init_xml_namespace(); + noko_init_xml_node_set(); + noko_init_xml_reader(); + + noko_init_xml_sax_parser(); + noko_init_html4_sax_parser(); + + noko_init_xml_xpath_context(); + noko_init_xslt_stylesheet(); + noko_init_html_element_description(); + noko_init_html_entity_lookup(); + + noko_init_xml_schema(); + noko_init_xml_relax_ng(); + + noko_init_xml_sax_parser_context(); + noko_init_html_sax_parser_context(); + + noko_init_xml_sax_push_parser(); + noko_init_html_sax_push_parser(); + + noko_init_xml_node(); + noko_init_xml_attr(); + noko_init_xml_attribute_decl(); + noko_init_xml_dtd(); + noko_init_xml_element_decl(); + noko_init_xml_entity_decl(); + noko_init_xml_entity_reference(); + noko_init_xml_processing_instruction(); + assert(cNokogiriXmlNode); + cNokogiriXmlElement = rb_define_class_under(mNokogiriXml, "Element", cNokogiriXmlNode); + cNokogiriXmlCharacterData = rb_define_class_under(mNokogiriXml, "CharacterData", cNokogiriXmlNode); + noko_init_xml_comment(); + noko_init_xml_text(); + noko_init_xml_cdata(); + + noko_init_xml_document_fragment(); + noko_init_xml_document(); + noko_init_html_document(); + noko_init_gumbo(); + + noko_init_test_global_handlers(); + + id_read = rb_intern("read"); + id_write = rb_intern("write"); + id_external_encoding = rb_intern("external_encoding"); +} diff --git a/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/nokogiri.h 
b/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/nokogiri.h new file mode 100644 index 000000000..b75ebc47f --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/nokogiri.h @@ -0,0 +1,238 @@ +#ifndef NOKOGIRI_NATIVE +#define NOKOGIRI_NATIVE + +#include // https://github.com/sparklemotion/nokogiri/issues/2696 + +#ifdef _MSC_VER +# ifndef WIN32_LEAN_AND_MEAN +# define WIN32_LEAN_AND_MEAN +# endif /* WIN32_LEAN_AND_MEAN */ + +# ifndef WIN32 +# define WIN32 +# endif /* WIN32 */ + +# include +# include +# include +#endif + +#ifdef _WIN32 +# define NOKOPUBFUN __declspec(dllexport) +# define NOKOPUBVAR __declspec(dllexport) extern +#else +# define NOKOPUBFUN +# define NOKOPUBVAR extern +#endif + +#include +#include +#include +#include +#include + + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + +#include +#include +#include +#include +#include +#include + +#include + +/* libxml2_polyfill.c */ +#ifndef HAVE_XMLCTXTSETOPTIONS +int xmlCtxtSetOptions(xmlParserCtxtPtr ctxt, int options); +#endif +#ifndef HAVE_XMLCTXTGETOPTIONS +int xmlCtxtGetOptions(xmlParserCtxtPtr ctxt); +#endif +#ifndef HAVE_XMLSWITCHENCODINGNAME +int xmlSwitchEncodingName(xmlParserCtxtPtr ctxt, const char *encoding); +#endif + +#define XMLNS_PREFIX "xmlns" +#define XMLNS_PREFIX_LEN 6 /* including either colon or \0 */ + +#ifndef xmlErrorConstPtr +# if LIBXML_VERSION >= 21200 +# define xmlErrorConstPtr const xmlError * +# else +# define xmlErrorConstPtr xmlError * +# endif +#endif + +#include +#include +#include +#include +#include + +#define NOKOGIRI_STR_NEW2(str) NOKOGIRI_STR_NEW(str, strlen((const char *)(str))) +#define NOKOGIRI_STR_NEW(str, len) rb_external_str_new_with_enc((const char *)(str), (long)(len), rb_utf8_encoding()) +#define RBSTR_OR_QNIL(_str) (_str ? 
NOKOGIRI_STR_NEW2(_str) : Qnil) + +#ifndef NORETURN_DECL +# if defined(__GNUC__) +# define NORETURN_DECL __attribute__ ((noreturn)) +# else +# define NORETURN_DECL +# endif +#endif + +#ifndef PRINTFLIKE_DECL +# if defined(__GNUC__) +# define PRINTFLIKE_DECL(stringidx, argidx) __attribute__ ((format(printf,stringidx,argidx))) +# else +# define PRINTFLIKE_DECL(stringidx, argidx) +# endif +#endif + +#if defined(TRUFFLERUBY) && !defined(NOKOGIRI_PACKAGED_LIBRARIES) +# define TRUFFLERUBY_NOKOGIRI_SYSTEM_LIBRARIES +#endif + +NOKOPUBVAR VALUE mNokogiri ; +NOKOPUBVAR VALUE mNokogiriGumbo ; +NOKOPUBVAR VALUE mNokogiriHtml4 ; +NOKOPUBVAR VALUE mNokogiriHtml4Sax ; +NOKOPUBVAR VALUE mNokogiriHtml5 ; +NOKOPUBVAR VALUE mNokogiriXml ; +NOKOPUBVAR VALUE mNokogiriXmlSax ; +NOKOPUBVAR VALUE mNokogiriXmlXpath ; +NOKOPUBVAR VALUE mNokogiriXslt ; + +NOKOPUBVAR VALUE cNokogiriEncodingHandler; +NOKOPUBVAR VALUE cNokogiriSyntaxError; +NOKOPUBVAR VALUE cNokogiriXmlAttr; +NOKOPUBVAR VALUE cNokogiriXmlAttributeDecl; +NOKOPUBVAR VALUE cNokogiriXmlCData; +NOKOPUBVAR VALUE cNokogiriXmlCharacterData; +NOKOPUBVAR VALUE cNokogiriXmlComment; +NOKOPUBVAR VALUE cNokogiriXmlDocument ; +NOKOPUBVAR VALUE cNokogiriXmlDocumentFragment; +NOKOPUBVAR VALUE cNokogiriXmlDtd; +NOKOPUBVAR VALUE cNokogiriXmlElement ; +NOKOPUBVAR VALUE cNokogiriXmlElementContent; +NOKOPUBVAR VALUE cNokogiriXmlElementDecl; +NOKOPUBVAR VALUE cNokogiriXmlEntityDecl; +NOKOPUBVAR VALUE cNokogiriXmlEntityReference; +NOKOPUBVAR VALUE cNokogiriXmlNamespace ; +NOKOPUBVAR VALUE cNokogiriXmlNode ; +NOKOPUBVAR VALUE cNokogiriXmlNodeSet ; +NOKOPUBVAR VALUE cNokogiriXmlProcessingInstruction; +NOKOPUBVAR VALUE cNokogiriXmlReader; +NOKOPUBVAR VALUE cNokogiriXmlRelaxNG; +NOKOPUBVAR VALUE cNokogiriXmlSaxParser ; +NOKOPUBVAR VALUE cNokogiriXmlSaxParserContext; +NOKOPUBVAR VALUE cNokogiriXmlSaxPushParser ; +NOKOPUBVAR VALUE cNokogiriXmlSchema; +NOKOPUBVAR VALUE cNokogiriXmlSyntaxError; +NOKOPUBVAR VALUE cNokogiriXmlText ; +NOKOPUBVAR VALUE cNokogiriXmlXpathContext; +NOKOPUBVAR VALUE cNokogiriXmlXpathSyntaxError; +NOKOPUBVAR VALUE cNokogiriXsltStylesheet ; + +NOKOPUBVAR VALUE cNokogiriHtml4Document ; +NOKOPUBVAR VALUE cNokogiriHtml4SaxPushParser ; +NOKOPUBVAR VALUE cNokogiriHtml4ElementDescription ; +NOKOPUBVAR VALUE cNokogiriHtml4SaxParser; +NOKOPUBVAR VALUE cNokogiriHtml4SaxParserContext; +NOKOPUBVAR VALUE cNokogiriHtml5Document ; + +typedef struct _nokogiriTuple { + VALUE doc; + st_table *unlinkedNodes; + VALUE node_cache; +} nokogiriTuple; +typedef nokogiriTuple *nokogiriTuplePtr; + +typedef struct _libxmlStructuredErrorHandlerState { + void *user_data; + xmlStructuredErrorFunc handler; +} libxmlStructuredErrorHandlerState ; + +typedef struct _nokogiriXsltStylesheetTuple { + xsltStylesheetPtr ss; + VALUE func_instances; +} nokogiriXsltStylesheetTuple; + +void noko_xml_document_pin_node(xmlNodePtr); +void noko_xml_document_pin_namespace(xmlNsPtr, xmlDocPtr); +int noko_xml_document_has_wrapped_blank_nodes_p(xmlDocPtr c_document); + +int noko_io_read(void *ctx, char *buffer, int len); +int noko_io_write(void *ctx, char *buffer, int len); +int noko_io_close(void *ctx); + +#define Noko_Node_Get_Struct(obj,type,sval) ((sval) = (type*)DATA_PTR(obj)) +#define Noko_Namespace_Get_Struct(obj,type,sval) ((sval) = (type*)DATA_PTR(obj)) + +VALUE noko_xml_node_wrap(VALUE klass, xmlNodePtr node) ; +VALUE noko_xml_node_wrap_node_set_result(xmlNodePtr node, VALUE node_set) ; +VALUE noko_xml_node_attrs(xmlNodePtr node) ; + +VALUE noko_xml_namespace_wrap(xmlNsPtr node, xmlDocPtr doc); +VALUE 
noko_xml_namespace_wrap_xpath_copy(xmlNsPtr node); + +VALUE noko_xml_element_content_wrap(VALUE doc, xmlElementContentPtr element); + +VALUE noko_xml_node_set_wrap(xmlNodeSetPtr node_set, VALUE document) ; +xmlNodeSetPtr noko_xml_node_set_unwrap(VALUE rb_node_set) ; + +VALUE noko_xml_document_wrap_with_init_args(VALUE klass, xmlDocPtr doc, int argc, VALUE *argv); +VALUE noko_xml_document_wrap(VALUE klass, xmlDocPtr doc); +xmlDocPtr noko_xml_document_unwrap(VALUE rb_document); +NOKOPUBFUN VALUE Nokogiri_wrap_xml_document(VALUE klass, + xmlDocPtr doc); /* deprecated. use noko_xml_document_wrap() instead. */ + +xmlSAXHandlerPtr noko_xml_sax_parser_unwrap(VALUE rb_sax_handler); + +xmlParserCtxtPtr noko_xml_sax_push_parser_unwrap(VALUE rb_parser); + +VALUE noko_xml_sax_parser_context_wrap(VALUE klass, xmlParserCtxtPtr c_context); +xmlParserCtxtPtr noko_xml_sax_parser_context_unwrap(VALUE rb_context); +void noko_xml_sax_parser_context_set_encoding(xmlParserCtxtPtr c_context, VALUE rb_encoding); + +#define DOC_RUBY_OBJECT_TEST(x) ((nokogiriTuplePtr)(x->_private)) +#define DOC_RUBY_OBJECT(x) (((nokogiriTuplePtr)(x->_private))->doc) +#define DOC_UNLINKED_NODE_HASH(x) (((nokogiriTuplePtr)(x->_private))->unlinkedNodes) +#define DOC_NODE_CACHE(x) (((nokogiriTuplePtr)(x->_private))->node_cache) +#define NOKOGIRI_NAMESPACE_EH(node) ((node)->type == XML_NAMESPACE_DECL) + +#define DISCARD_CONST_QUAL(t, v) ((t)(uintptr_t)(v)) +#define DISCARD_CONST_QUAL_XMLCHAR(v) DISCARD_CONST_QUAL(xmlChar *, v) + +#if HAVE_RB_CATEGORY_WARNING +# define NOKO_WARN_DEPRECATION(message...) rb_category_warning(RB_WARN_CATEGORY_DEPRECATED, message) +#else +# define NOKO_WARN_DEPRECATION(message...) rb_warning(message) +#endif + +void noko__structured_error_func_save(libxmlStructuredErrorHandlerState *handler_state); +void noko__structured_error_func_save_and_set(libxmlStructuredErrorHandlerState *handler_state, void *user_data, + xmlStructuredErrorFunc handler); +void noko__structured_error_func_restore(libxmlStructuredErrorHandlerState *handler_state); +VALUE noko_xml_syntax_error__wrap(xmlErrorConstPtr error); +void noko__error_array_pusher(void *ctx, xmlErrorConstPtr error); +NORETURN_DECL void noko__error_raise(void *ctx, xmlErrorConstPtr error); +void Nokogiri_marshal_xpath_funcall_and_return_values(xmlXPathParserContextPtr ctx, int nargs, VALUE handler, + const char *function_name) ; + +#endif /* NOKOGIRI_NATIVE */ diff --git a/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/test_global_handlers.c b/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/test_global_handlers.c new file mode 100644 index 000000000..cec0915fe --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/test_global_handlers.c @@ -0,0 +1,40 @@ +#include + +static VALUE foreign_error_handler_block = Qnil; + +static void +foreign_error_handler(void *user_data, xmlErrorConstPtr c_error) +{ + rb_funcall(foreign_error_handler_block, rb_intern("call"), 0); +} + +/* + * call-seq: + * __foreign_error_handler { ... } -> nil + * + * Override libxml2's global error handlers to call the block. This method thus has very little + * value except to test that Nokogiri is properly setting error handlers elsewhere in the code. See + * test/helper.rb for how this is being used. 
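+ *
+ * (Added note: the block is invoked with no arguments; the underlying libxml2
+ * error object is not passed through, see foreign_error_handler above.)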
+ */ +static VALUE +rb_foreign_error_handler(VALUE klass) +{ + rb_need_block(); + foreign_error_handler_block = rb_block_proc(); + xmlSetStructuredErrorFunc(NULL, foreign_error_handler); + return Qnil; +} + +/* + * Document-module: Nokogiri::Test + * + * The Nokogiri::Test module should only be used for testing Nokogiri. + * Do NOT use this outside of the Nokogiri test suite. + */ +void +noko_init_test_global_handlers(void) +{ + VALUE mNokogiriTest = rb_define_module_under(mNokogiri, "Test"); + + rb_define_singleton_method(mNokogiriTest, "__foreign_error_handler", rb_foreign_error_handler, 0); +} diff --git a/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/xml_attr.c b/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/xml_attr.c new file mode 100644 index 000000000..90eea3c73 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/xml_attr.c @@ -0,0 +1,103 @@ +#include + +VALUE cNokogiriXmlAttr; + +/* + * call-seq: + * value=(content) + * + * Set the value for this Attr to +content+. Use +nil+ to remove the value + * (e.g., a HTML boolean attribute). + */ +static VALUE +set_value(VALUE self, VALUE content) +{ + xmlAttrPtr attr; + xmlChar *value; + xmlNode *cur; + + Noko_Node_Get_Struct(self, xmlAttr, attr); + + if (attr->children) { + xmlFreeNodeList(attr->children); + } + attr->children = attr->last = NULL; + + if (content == Qnil) { + return content; + } + + value = xmlEncodeEntitiesReentrant(attr->doc, (unsigned char *)StringValueCStr(content)); + if (xmlStrlen(value) == 0) { + attr->children = xmlNewDocText(attr->doc, value); + } else { + attr->children = xmlStringGetNodeList(attr->doc, value); + } + xmlFree(value); + + for (cur = attr->children; cur; cur = cur->next) { + cur->parent = (xmlNode *)attr; + cur->doc = attr->doc; + if (cur->next == NULL) { + attr->last = cur; + } + } + + return content; +} + +/* + * call-seq: + * new(document, name) + * + * Create a new Attr element on the +document+ with +name+ + */ +static VALUE +new (int argc, VALUE *argv, VALUE klass) +{ + xmlDocPtr xml_doc; + VALUE document; + VALUE name; + VALUE rest; + xmlAttrPtr node; + VALUE rb_node; + + rb_scan_args(argc, argv, "2*", &document, &name, &rest); + + if (! rb_obj_is_kind_of(document, cNokogiriXmlDocument)) { + rb_raise(rb_eArgError, "parameter must be a Nokogiri::XML::Document"); + } + + xml_doc = noko_xml_document_unwrap(document); + + node = xmlNewDocProp( + xml_doc, + (const xmlChar *)StringValueCStr(name), + NULL + ); + + noko_xml_document_pin_node((xmlNodePtr)node); + + rb_node = noko_xml_node_wrap(klass, (xmlNodePtr)node); + rb_obj_call_init(rb_node, argc, argv); + + if (rb_block_given_p()) { + rb_yield(rb_node); + } + + return rb_node; +} + +void +noko_init_xml_attr(void) +{ + assert(cNokogiriXmlNode); + /* + * Attr represents a Attr node in an xml document. 
+ */ + cNokogiriXmlAttr = rb_define_class_under(mNokogiriXml, "Attr", cNokogiriXmlNode); + + rb_define_singleton_method(cNokogiriXmlAttr, "new", new, -1); + + rb_define_method(cNokogiriXmlAttr, "value=", set_value, 1); +} diff --git a/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/xml_attribute_decl.c b/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/xml_attribute_decl.c new file mode 100644 index 000000000..3f9bebc88 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/xml_attribute_decl.c @@ -0,0 +1,70 @@ +#include + +VALUE cNokogiriXmlAttributeDecl; + +/* + * call-seq: + * attribute_type + * + * The attribute_type for this AttributeDecl + */ +static VALUE +attribute_type(VALUE self) +{ + xmlAttributePtr node; + Noko_Node_Get_Struct(self, xmlAttribute, node); + return INT2NUM(node->atype); +} + +/* + * call-seq: + * default + * + * The default value + */ +static VALUE +default_value(VALUE self) +{ + xmlAttributePtr node; + Noko_Node_Get_Struct(self, xmlAttribute, node); + + if (node->defaultValue) { return NOKOGIRI_STR_NEW2(node->defaultValue); } + return Qnil; +} + +/* + * call-seq: + * enumeration + * + * An enumeration of possible values + */ +static VALUE +enumeration(VALUE self) +{ + xmlAttributePtr node; + xmlEnumerationPtr enm; + VALUE list; + + Noko_Node_Get_Struct(self, xmlAttribute, node); + + list = rb_ary_new(); + enm = node->tree; + + while (enm) { + rb_ary_push(list, NOKOGIRI_STR_NEW2(enm->name)); + enm = enm->next; + } + + return list; +} + +void +noko_init_xml_attribute_decl(void) +{ + assert(cNokogiriXmlNode); + cNokogiriXmlAttributeDecl = rb_define_class_under(mNokogiriXml, "AttributeDecl", cNokogiriXmlNode); + + rb_define_method(cNokogiriXmlAttributeDecl, "attribute_type", attribute_type, 0); + rb_define_method(cNokogiriXmlAttributeDecl, "default", default_value, 0); + rb_define_method(cNokogiriXmlAttributeDecl, "enumeration", enumeration, 0); +} diff --git a/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/xml_cdata.c b/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/xml_cdata.c new file mode 100644 index 000000000..4431d200f --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/xml_cdata.c @@ -0,0 +1,62 @@ +#include + +VALUE cNokogiriXmlCData; + +/* + * call-seq: + * new(document, content) + * + * Create a new CDATA element on the +document+ with +content+ + * + * If +content+ cannot be implicitly converted to a string, this method will + * raise a TypeError exception. + */ +static VALUE +rb_xml_cdata_s_new(int argc, VALUE *argv, VALUE klass) +{ + xmlDocPtr c_document; + xmlNodePtr c_node; + VALUE rb_document; + VALUE rb_content; + VALUE rb_rest; + VALUE rb_node; + + rb_scan_args(argc, argv, "2*", &rb_document, &rb_content, &rb_rest); + + Check_Type(rb_content, T_STRING); + if (!rb_obj_is_kind_of(rb_document, cNokogiriXmlNode)) { + rb_raise(rb_eTypeError, + "expected first parameter to be a Nokogiri::XML::Document, received %"PRIsVALUE, + rb_obj_class(rb_document)); + } + + if (!rb_obj_is_kind_of(rb_document, cNokogiriXmlDocument)) { + xmlNodePtr deprecated_node_type_arg; + NOKO_WARN_DEPRECATION("Passing a Node as the first parameter to CDATA.new is deprecated. Please pass a Document instead. 
This will become an error in Nokogiri v1.17.0."); // TODO: deprecated in v1.15.3, remove in v1.17.0 + Noko_Node_Get_Struct(rb_document, xmlNode, deprecated_node_type_arg); + c_document = deprecated_node_type_arg->doc; + } else { + c_document = noko_xml_document_unwrap(rb_document); + } + + c_node = xmlNewCDataBlock(c_document, (xmlChar *)StringValueCStr(rb_content), RSTRING_LENINT(rb_content)); + noko_xml_document_pin_node(c_node); + rb_node = noko_xml_node_wrap(klass, c_node); + rb_obj_call_init(rb_node, argc, argv); + + if (rb_block_given_p()) { rb_yield(rb_node); } + + return rb_node; +} + +void +noko_init_xml_cdata(void) +{ + assert(cNokogiriXmlText); + /* + * CData represents a CData node in an xml document. + */ + cNokogiriXmlCData = rb_define_class_under(mNokogiriXml, "CDATA", cNokogiriXmlText); + + rb_define_singleton_method(cNokogiriXmlCData, "new", rb_xml_cdata_s_new, -1); +} diff --git a/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/xml_comment.c b/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/xml_comment.c new file mode 100644 index 000000000..211761c9c --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/xml_comment.c @@ -0,0 +1,57 @@ +#include + +VALUE cNokogiriXmlComment; + +static ID document_id ; + +/* + * call-seq: + * new(document_or_node, content) + * + * Create a new Comment element on the +document+ with +content+. + * Alternatively, if a +node+ is passed, the +node+'s document is used. + */ +static VALUE +new (int argc, VALUE *argv, VALUE klass) +{ + xmlDocPtr xml_doc; + xmlNodePtr node; + VALUE document; + VALUE content; + VALUE rest; + VALUE rb_node; + + rb_scan_args(argc, argv, "2*", &document, &content, &rest); + + Check_Type(content, T_STRING); + if (rb_obj_is_kind_of(document, cNokogiriXmlNode)) { + document = rb_funcall(document, document_id, 0); + } else if (!rb_obj_is_kind_of(document, cNokogiriXmlDocument) + && !rb_obj_is_kind_of(document, cNokogiriXmlDocumentFragment)) { + rb_raise(rb_eArgError, "first argument must be a XML::Document or XML::Node"); + } + xml_doc = noko_xml_document_unwrap(document); + + node = xmlNewDocComment(xml_doc, (const xmlChar *)StringValueCStr(content)); + noko_xml_document_pin_node(node); + rb_node = noko_xml_node_wrap(klass, node); + rb_obj_call_init(rb_node, argc, argv); + + if (rb_block_given_p()) { rb_yield(rb_node); } + + return rb_node; +} + +void +noko_init_xml_comment(void) +{ + assert(cNokogiriXmlCharacterData); + /* + * Comment represents a comment node in an xml document. 
+ */ + cNokogiriXmlComment = rb_define_class_under(mNokogiriXml, "Comment", cNokogiriXmlCharacterData); + + rb_define_singleton_method(cNokogiriXmlComment, "new", new, -1); + + document_id = rb_intern("document"); +} diff --git a/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/xml_document.c b/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/xml_document.c new file mode 100644 index 000000000..740819300 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/xml_document.c @@ -0,0 +1,784 @@ +#include + +VALUE cNokogiriXmlDocument ; + +static int +dealloc_node_i2(xmlNodePtr key, xmlNodePtr node, xmlDocPtr doc) +{ + switch (node->type) { + case XML_ATTRIBUTE_NODE: + xmlFreePropList((xmlAttrPtr)node); + break; + case XML_NAMESPACE_DECL: + xmlFreeNs((xmlNsPtr)node); + break; + case XML_DTD_NODE: + xmlFreeDtd((xmlDtdPtr)node); + break; + default: + if (node->parent == NULL) { + node->next = NULL; + node->prev = NULL; + xmlAddChild((xmlNodePtr)doc, node); + } + } + return ST_CONTINUE; +} + +static int +dealloc_node_i(st_data_t key, st_data_t node, st_data_t doc) +{ + return dealloc_node_i2((xmlNodePtr)key, (xmlNodePtr)node, (xmlDocPtr)doc); +} + +static void +remove_private(xmlNodePtr node) +{ + xmlNodePtr child; + + for (child = node->children; child; child = child->next) { + remove_private(child); + } + + if ((node->type == XML_ELEMENT_NODE || + node->type == XML_XINCLUDE_START || + node->type == XML_XINCLUDE_END) && + node->properties) { + for (child = (xmlNodePtr)node->properties; child; child = child->next) { + remove_private(child); + } + } + + node->_private = NULL; +} + +static void +mark(void *data) +{ + xmlDocPtr doc = (xmlDocPtr)data; + nokogiriTuplePtr tuple = (nokogiriTuplePtr)doc->_private; + if (tuple) { + rb_gc_mark(tuple->doc); + rb_gc_mark(tuple->node_cache); + } +} + +static void +dealloc(void *data) +{ + xmlDocPtr doc = (xmlDocPtr)data; + st_table *node_hash; + + node_hash = DOC_UNLINKED_NODE_HASH(doc); + + st_foreach(node_hash, dealloc_node_i, (st_data_t)doc); + st_free_table(node_hash); + + ruby_xfree(doc->_private); + +#if defined(__GNUC__) && __GNUC__ >= 5 +#pragma GCC diagnostic push +#pragma GCC diagnostic ignored "-Wdeprecated-declarations" // xmlDeregisterNodeDefault is deprecated as of libxml2 2.11.0 +#endif + /* + * libxml-ruby < 3.0.0 uses xmlDeregisterNodeDefault. If the user is using one of those older + * versions, the registered callback from libxml-ruby will access the _private pointers set by + * nokogiri, which will result in segfaults. + * + * To avoid this, we need to clear the _private pointers from all nodes in this document tree + * before that callback gets invoked. + * + * libxml-ruby 3.0.0 was released in 2017-02, so at some point we can probably safely remove this + * safeguard (though probably pairing with a runtime check on the libxml-ruby version). 
+ */ + if (xmlDeregisterNodeDefaultValue) { + remove_private((xmlNodePtr)doc); + } +#if defined(__GNUC__) && __GNUC__ >= 5 +#pragma GCC diagnostic pop +#endif + + xmlFreeDoc(doc); +} + +static size_t +memsize_node(const xmlNodePtr node) +{ + /* note we don't count namespace definitions, just going for a good-enough number here */ + xmlNodePtr child; + xmlAttrPtr property; + size_t memsize = 0; + + memsize += (size_t)xmlStrlen(node->name); + + if (node->type == XML_ELEMENT_NODE) { + for (property = node->properties; property; property = property->next) { + memsize += sizeof(xmlAttr) + memsize_node((xmlNodePtr)property); + } + } + if (node->type == XML_TEXT_NODE) { + memsize += (size_t)xmlStrlen(node->content); + } + for (child = node->children; child; child = child->next) { + memsize += sizeof(xmlNode) + memsize_node(child); + } + return memsize; +} + +static size_t +memsize(const void *data) +{ + xmlDocPtr doc = (const xmlDocPtr)data; + size_t memsize = sizeof(xmlDoc); + /* This may not account for all memory use */ + memsize += memsize_node((xmlNodePtr)doc); + return memsize; +} + +static const rb_data_type_t xml_doc_type = { + .wrap_struct_name = "xmlDoc", + .function = { + .dmark = mark, + .dfree = dealloc, + .dsize = memsize, + }, + // .flags = RUBY_TYPED_FREE_IMMEDIATELY, // TODO see https://github.com/sparklemotion/nokogiri/issues/2822 +}; + +static VALUE +_xml_document_alloc(VALUE klass) +{ + return TypedData_Wrap_Struct(klass, &xml_doc_type, NULL); +} + +static void +_xml_document_data_ptr_set(VALUE rb_document, xmlDocPtr c_document) +{ + nokogiriTuplePtr tuple; + + assert(DATA_PTR(rb_document) == NULL); + assert(c_document->_private == NULL); + + DATA_PTR(rb_document) = c_document; + + tuple = (nokogiriTuplePtr)ruby_xmalloc(sizeof(nokogiriTuple)); + tuple->doc = rb_document; + tuple->unlinkedNodes = st_init_numtable_with_size(128); + tuple->node_cache = rb_ary_new(); + + c_document->_private = tuple ; + + rb_iv_set(rb_document, "@node_cache", tuple->node_cache); + + return; +} + +/* :nodoc: */ +static VALUE +rb_xml_document_initialize_copy_with_args(VALUE rb_self, VALUE rb_other, VALUE rb_level) +{ + xmlDocPtr c_other, c_self; + int c_level; + + c_other = noko_xml_document_unwrap(rb_other); + c_level = (int)NUM2INT(rb_level); + + c_self = xmlCopyDoc(c_other, c_level); + if (c_self == NULL) { return Qnil; } + + c_self->type = c_other->type; + _xml_document_data_ptr_set(rb_self, c_self); + + return rb_self ; +} + +static void +recursively_remove_namespaces_from_node(xmlNodePtr node) +{ + xmlNodePtr child ; + xmlAttrPtr property ; + + xmlSetNs(node, NULL); + + for (child = node->children ; child ; child = child->next) { + recursively_remove_namespaces_from_node(child); + } + + if (((node->type == XML_ELEMENT_NODE) || + (node->type == XML_XINCLUDE_START) || + (node->type == XML_XINCLUDE_END)) && + node->nsDef) { + xmlNsPtr curr = node->nsDef; + while (curr) { + noko_xml_document_pin_namespace(curr, node->doc); + curr = curr->next; + } + node->nsDef = NULL; + } + + if (node->type == XML_ELEMENT_NODE && node->properties != NULL) { + property = node->properties ; + while (property != NULL) { + if (property->ns) { property->ns = NULL ; } + property = property->next ; + } + } +} + +/* + * call-seq: + * url + * + * Get the url name for this document. 
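+ *
+ * (Added note: returns +nil+ when no URL was recorded at parse time.)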
+ */ +static VALUE +url(VALUE self) +{ + xmlDocPtr doc = noko_xml_document_unwrap(self); + + if (doc->URL) { return NOKOGIRI_STR_NEW2(doc->URL); } + + return Qnil; +} + +/* + * call-seq: + * root= + * + * Set the root element on this document + */ +static VALUE +rb_xml_document_root_set(VALUE self, VALUE rb_new_root) +{ + xmlDocPtr c_document; + xmlNodePtr c_new_root = NULL, c_current_root; + + c_document = noko_xml_document_unwrap(self); + + c_current_root = xmlDocGetRootElement(c_document); + if (c_current_root) { + xmlUnlinkNode(c_current_root); + noko_xml_document_pin_node(c_current_root); + } + + if (!NIL_P(rb_new_root)) { + if (!rb_obj_is_kind_of(rb_new_root, cNokogiriXmlNode)) { + rb_raise(rb_eArgError, + "expected Nokogiri::XML::Node but received %"PRIsVALUE, + rb_obj_class(rb_new_root)); + } + + Noko_Node_Get_Struct(rb_new_root, xmlNode, c_new_root); + + /* If the new root's document is not the same as the current document, + * then we need to dup the node in to this document. */ + if (c_new_root->doc != c_document) { + c_new_root = xmlDocCopyNode(c_new_root, c_document, 1); + if (!c_new_root) { + rb_raise(rb_eRuntimeError, "Could not reparent node (xmlDocCopyNode)"); + } + } + } + + xmlDocSetRootElement(c_document, c_new_root); + + return rb_new_root; +} + +/* + * call-seq: + * root + * + * Get the root node for this document. + */ +static VALUE +rb_xml_document_root(VALUE self) +{ + xmlDocPtr c_document; + xmlNodePtr c_root; + + c_document = noko_xml_document_unwrap(self); + + c_root = xmlDocGetRootElement(c_document); + if (!c_root) { + return Qnil; + } + + return noko_xml_node_wrap(Qnil, c_root) ; +} + +/* + * call-seq: + * encoding= encoding + * + * Set the encoding string for this Document + */ +static VALUE +set_encoding(VALUE self, VALUE encoding) +{ + xmlDocPtr doc = noko_xml_document_unwrap(self); + + if (doc->encoding) { + xmlFree(DISCARD_CONST_QUAL_XMLCHAR(doc->encoding)); + } + + doc->encoding = xmlStrdup((xmlChar *)StringValueCStr(encoding)); + + return encoding; +} + +/* + * call-seq: + * encoding + * + * Get the encoding for this Document + */ +static VALUE +encoding(VALUE self) +{ + xmlDocPtr doc = noko_xml_document_unwrap(self); + + if (!doc->encoding) { return Qnil; } + return NOKOGIRI_STR_NEW2(doc->encoding); +} + +/* + * call-seq: + * version + * + * Get the XML version for this Document + */ +static VALUE +version(VALUE self) +{ + xmlDocPtr doc = noko_xml_document_unwrap(self); + + if (!doc->version) { return Qnil; } + return NOKOGIRI_STR_NEW2(doc->version); +} + +/* + * call-seq: + * read_io(io, url, encoding, options) + * + * Create a new document from an IO object + */ +static VALUE +noko_xml_document_s_read_io(VALUE rb_class, + VALUE rb_io, + VALUE rb_url, + VALUE rb_encoding, + VALUE rb_options) +{ + /* TODO: deprecate this method, parse should be the preferred entry point. then we can make this + private. */ + libxmlStructuredErrorHandlerState handler_state; + VALUE rb_errors = rb_ary_new(); + + noko__structured_error_func_save_and_set(&handler_state, (void *)rb_errors, noko__error_array_pusher); + + const char *c_url = NIL_P(rb_url) ? NULL : StringValueCStr(rb_url); + const char *c_enc = NIL_P(rb_encoding) ? 
NULL : StringValueCStr(rb_encoding); + xmlDocPtr c_document = xmlReadIO( + (xmlInputReadCallback)noko_io_read, + (xmlInputCloseCallback)noko_io_close, + (void *)rb_io, + c_url, + c_enc, + (int)NUM2INT(rb_options) + ); + + noko__structured_error_func_restore(&handler_state); + + if (c_document == NULL) { + xmlFreeDoc(c_document); + + VALUE exception = rb_funcall(cNokogiriXmlSyntaxError, rb_intern("aggregate"), 1, rb_errors); + if (RB_TEST(exception)) { + rb_exc_raise(exception); + } else { + rb_raise(rb_eRuntimeError, "Could not parse document"); + } + } + + VALUE rb_document = noko_xml_document_wrap(rb_class, c_document); + rb_iv_set(rb_document, "@errors", rb_errors); + return rb_document; +} + +/* + * call-seq: + * read_memory(string, url, encoding, options) + * + * Create a new document from a String + */ +static VALUE +noko_xml_document_s_read_memory(VALUE rb_class, + VALUE rb_input, + VALUE rb_url, + VALUE rb_encoding, + VALUE rb_options) +{ + /* TODO: deprecate this method, parse should be the preferred entry point. then we can make this + private. */ + VALUE rb_errors = rb_ary_new(); + xmlSetStructuredErrorFunc((void *)rb_errors, noko__error_array_pusher); + + const char *c_buffer = StringValuePtr(rb_input); + const char *c_url = NIL_P(rb_url) ? NULL : StringValueCStr(rb_url); + const char *c_enc = NIL_P(rb_encoding) ? NULL : StringValueCStr(rb_encoding); + int c_buffer_len = (int)RSTRING_LEN(rb_input); + xmlDocPtr c_document = xmlReadMemory(c_buffer, c_buffer_len, c_url, c_enc, (int)NUM2INT(rb_options)); + + xmlSetStructuredErrorFunc(NULL, NULL); + + if (c_document == NULL) { + VALUE exception = rb_funcall(cNokogiriXmlSyntaxError, rb_intern("aggregate"), 1, rb_errors); + if (RB_TEST(exception)) { + rb_exc_raise(exception); + } else { + rb_raise(rb_eRuntimeError, "Could not parse document"); + } + } + + VALUE document = noko_xml_document_wrap(rb_class, c_document); + rb_iv_set(document, "@errors", rb_errors); + return document; +} + +/* + * call-seq: + * new(version = "1.0") + * + * Create a new empty document declaring XML version +version+. + */ +static VALUE +new (int argc, VALUE *argv, VALUE klass) +{ + xmlDocPtr doc; + VALUE version, rest, rb_doc ; + + rb_scan_args(argc, argv, "0*", &rest); + version = rb_ary_entry(rest, (long)0); + if (NIL_P(version)) { version = rb_str_new2("1.0"); } + + doc = xmlNewDoc((xmlChar *)StringValueCStr(version)); + rb_doc = noko_xml_document_wrap_with_init_args(klass, doc, argc, argv); + return rb_doc ; +} + +/* + * call-seq: + * remove_namespaces! + * + * Remove all namespaces from all nodes in the document. + * + * This could be useful for developers who either don't understand namespaces + * or don't care about them. + * + * The following example shows a use case, and you can decide for yourself + * whether this is a good thing or not: + * + * doc = Nokogiri::XML <<-EOXML + * + * + * Michelin Model XGV + * + * + * I'm a bicycle tire! + * + * + * EOXML + * + * doc.xpath("//tire").to_s # => "" + * doc.xpath("//part:tire", "part" => "http://general-motors.com/").to_s # => "Michelin Model XGV" + * doc.xpath("//part:tire", "part" => "http://schwinn.com/").to_s # => "I'm a bicycle tire!" + * + * doc.remove_namespaces! + * + * doc.xpath("//tire").to_s # => "Michelin Model XGVI'm a bicycle tire!" 
+ * doc.xpath("//part:tire", "part" => "http://general-motors.com/").to_s # => "" + * doc.xpath("//part:tire", "part" => "http://schwinn.com/").to_s # => "" + * + * For more information on why this probably is *not* a good thing in general, + * please direct your browser to + * http://tenderlovemaking.com/2009/04/23/namespaces-in-xml.html + */ +static VALUE +remove_namespaces_bang(VALUE self) +{ + xmlDocPtr doc = noko_xml_document_unwrap(self); + + recursively_remove_namespaces_from_node((xmlNodePtr)doc); + return self; +} + +/* call-seq: + * doc.create_entity(name, type, external_id, system_id, content) + * + * Create a new entity named +name+. + * + * +type+ is an integer representing the type of entity to be created, and it defaults to + * +Nokogiri::XML::EntityDecl::INTERNAL_GENERAL+. See the constants on Nokogiri::XML::EntityDecl for + * more information. + * + * +external_id+, +system_id+, and +content+ set the External ID, System ID, + * and content respectively. All of these parameters are optional. + */ +static VALUE +noko_xml_document__create_entity(int argc, VALUE *argv, VALUE rb_document) +{ + VALUE rb_name; + VALUE rb_type; + VALUE rb_ext_id; + VALUE rb_sys_id; + VALUE rb_content; + + rb_scan_args(argc, argv, "14", + &rb_name, + &rb_type, &rb_ext_id, &rb_sys_id, &rb_content); + + xmlDocPtr c_document = noko_xml_document_unwrap(rb_document); + + libxmlStructuredErrorHandlerState handler_state; + VALUE rb_errors = rb_ary_new(); + noko__structured_error_func_save_and_set(&handler_state, (void *)rb_errors, noko__error_array_pusher); + + xmlEntityPtr c_entity = xmlAddDocEntity( + c_document, + (xmlChar *)(NIL_P(rb_name) ? NULL : StringValueCStr(rb_name)), + (int)(NIL_P(rb_type) ? XML_INTERNAL_GENERAL_ENTITY : NUM2INT(rb_type)), + (xmlChar *)(NIL_P(rb_ext_id) ? NULL : StringValueCStr(rb_ext_id)), + (xmlChar *)(NIL_P(rb_sys_id) ? NULL : StringValueCStr(rb_sys_id)), + (xmlChar *)(NIL_P(rb_content) ? NULL : StringValueCStr(rb_content)) + ); + + noko__structured_error_func_restore(&handler_state); + + if (NULL == c_entity) { + VALUE exception = rb_funcall(cNokogiriXmlSyntaxError, rb_intern("aggregate"), 1, rb_errors); + if (RB_TEST(exception)) { + rb_exc_raise(exception); + } else { + rb_raise(rb_eRuntimeError, "Could not create entity"); + } + } + + return noko_xml_node_wrap(cNokogiriXmlEntityDecl, (xmlNodePtr)c_entity); +} + +static int +block_caller(void *ctx, xmlNodePtr c_node, xmlNodePtr c_parent_node) +{ + VALUE block = (VALUE)ctx; + VALUE rb_node; + VALUE rb_parent_node; + VALUE ret; + + if (c_node->type == XML_NAMESPACE_DECL) { + rb_node = noko_xml_namespace_wrap((xmlNsPtr)c_node, c_parent_node->doc); + } else { + rb_node = noko_xml_node_wrap(Qnil, c_node); + } + rb_parent_node = c_parent_node ? noko_xml_node_wrap(Qnil, c_parent_node) : Qnil; + + ret = rb_funcall(block, rb_intern("call"), 2, rb_node, rb_parent_node); + + return (Qfalse == ret || Qnil == ret) ? 0 : 1; +} + +/* call-seq: + * doc.canonicalize(mode=XML_C14N_1_0,inclusive_namespaces=nil,with_comments=false) + * doc.canonicalize { |obj, parent| ... } + * + * Canonicalize a document and return the results. Takes an optional block + * that takes two parameters: the +obj+ and that node's +parent+. + * The +obj+ will be either a Nokogiri::XML::Node, or a Nokogiri::XML::Namespace + * The block must return a non-nil, non-false value if the +obj+ passed in + * should be included in the canonicalized document. 
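+ *
+ * (Added note: the +mode+ argument corresponds to libxml2's xmlC14NMode
+ * values (XML_C14N_1_0, XML_C14N_EXCLUSIVE_1_0, XML_C14N_1_1), and
+ * +inclusive_namespaces+ is only meaningful for exclusive canonicalization,
+ * which is what the argument check below enforces.)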
+ */ +static VALUE +rb_xml_document_canonicalize(int argc, VALUE *argv, VALUE self) +{ + VALUE rb_mode; + VALUE rb_namespaces; + VALUE rb_comments_p; + int c_mode = 0; + xmlChar **c_namespaces; + + xmlDocPtr c_doc; + xmlOutputBufferPtr c_obuf; + xmlC14NIsVisibleCallback c_callback_wrapper = NULL; + void *rb_callback = NULL; + + VALUE rb_cStringIO; + VALUE rb_io; + + rb_scan_args(argc, argv, "03", &rb_mode, &rb_namespaces, &rb_comments_p); + if (!NIL_P(rb_mode)) { + Check_Type(rb_mode, T_FIXNUM); + c_mode = NUM2INT(rb_mode); + } + if (!NIL_P(rb_namespaces)) { + Check_Type(rb_namespaces, T_ARRAY); + if (c_mode == XML_C14N_1_0 || c_mode == XML_C14N_1_1) { + rb_raise(rb_eRuntimeError, "This canonicalizer does not support this operation"); + } + } + + c_doc = noko_xml_document_unwrap(self); + + rb_cStringIO = rb_const_get_at(rb_cObject, rb_intern("StringIO")); + rb_io = rb_class_new_instance(0, 0, rb_cStringIO); + c_obuf = xmlAllocOutputBuffer(NULL); + + c_obuf->writecallback = (xmlOutputWriteCallback)noko_io_write; + c_obuf->closecallback = (xmlOutputCloseCallback)noko_io_close; + c_obuf->context = (void *)rb_io; + + if (rb_block_given_p()) { + c_callback_wrapper = block_caller; + rb_callback = (void *)rb_block_proc(); + } + + if (NIL_P(rb_namespaces)) { + c_namespaces = NULL; + } else { + long ns_len = RARRAY_LEN(rb_namespaces); + c_namespaces = ruby_xcalloc((size_t)ns_len + 1, sizeof(xmlChar *)); + for (int j = 0 ; j < ns_len ; j++) { + VALUE entry = rb_ary_entry(rb_namespaces, j); + c_namespaces[j] = (xmlChar *)StringValueCStr(entry); + } + } + + xmlC14NExecute(c_doc, c_callback_wrapper, rb_callback, + c_mode, + c_namespaces, + (int)RTEST(rb_comments_p), + c_obuf); + + ruby_xfree(c_namespaces); + xmlOutputBufferClose(c_obuf); + + return rb_funcall(rb_io, rb_intern("string"), 0); +} + +VALUE +noko_xml_document_wrap_with_init_args(VALUE klass, xmlDocPtr c_document, int argc, VALUE *argv) +{ + VALUE rb_document; + + if (!klass) { + klass = cNokogiriXmlDocument; + } + + rb_document = _xml_document_alloc(klass); + _xml_document_data_ptr_set(rb_document, c_document); + + rb_iv_set(rb_document, "@decorators", Qnil); + rb_iv_set(rb_document, "@errors", Qnil); + + rb_obj_call_init(rb_document, argc, argv); + + return rb_document ; +} + + +/* deprecated. use noko_xml_document_wrap() instead. */ +VALUE +Nokogiri_wrap_xml_document(VALUE klass, xmlDocPtr doc) +{ + /* TODO: deprecate this method in v2.0 */ + return noko_xml_document_wrap_with_init_args(klass, doc, 0, NULL); +} + +VALUE +noko_xml_document_wrap(VALUE klass, xmlDocPtr doc) +{ + return noko_xml_document_wrap_with_init_args(klass, doc, 0, NULL); +} + +xmlDocPtr +noko_xml_document_unwrap(VALUE rb_document) +{ + xmlDocPtr c_document; + TypedData_Get_Struct(rb_document, xmlDoc, &xml_doc_type, c_document); + return c_document; +} + +/* Schema creation will remove and deallocate "blank" nodes. + * If those blank nodes have been exposed to Ruby, they could get freed + * out from under the VALUE pointer. This function checks to see if any of + * those nodes have been exposed to Ruby, and if so we should raise an exception. 
+ */ +int +noko_xml_document_has_wrapped_blank_nodes_p(xmlDocPtr c_document) +{ + VALUE cache = DOC_NODE_CACHE(c_document); + + if (NIL_P(cache)) { + return 0; + } + + for (long jnode = 0; jnode < RARRAY_LEN(cache); jnode++) { + xmlNodePtr node; + VALUE element = rb_ary_entry(cache, jnode); + + Noko_Node_Get_Struct(element, xmlNode, node); + if (xmlIsBlankNode(node)) { + return 1; + } + } + + return 0; +} + +void +noko_xml_document_pin_node(xmlNodePtr node) +{ + xmlDocPtr doc; + nokogiriTuplePtr tuple; + + doc = node->doc; + tuple = (nokogiriTuplePtr)doc->_private; + st_insert(tuple->unlinkedNodes, (st_data_t)node, (st_data_t)node); +} + + +void +noko_xml_document_pin_namespace(xmlNsPtr ns, xmlDocPtr doc) +{ + nokogiriTuplePtr tuple; + + tuple = (nokogiriTuplePtr)doc->_private; + st_insert(tuple->unlinkedNodes, (st_data_t)ns, (st_data_t)ns); +} + + +void +noko_init_xml_document(void) +{ + assert(cNokogiriXmlNode); + + cNokogiriXmlDocument = rb_define_class_under(mNokogiriXml, "Document", cNokogiriXmlNode); + + rb_define_alloc_func(cNokogiriXmlDocument, _xml_document_alloc); + + rb_define_singleton_method(cNokogiriXmlDocument, "read_memory", noko_xml_document_s_read_memory, 4); + rb_define_singleton_method(cNokogiriXmlDocument, "read_io", noko_xml_document_s_read_io, 4); + rb_define_singleton_method(cNokogiriXmlDocument, "new", new, -1); + + rb_define_method(cNokogiriXmlDocument, "root", rb_xml_document_root, 0); + rb_define_method(cNokogiriXmlDocument, "root=", rb_xml_document_root_set, 1); + rb_define_method(cNokogiriXmlDocument, "encoding", encoding, 0); + rb_define_method(cNokogiriXmlDocument, "encoding=", set_encoding, 1); + rb_define_method(cNokogiriXmlDocument, "version", version, 0); + rb_define_method(cNokogiriXmlDocument, "canonicalize", rb_xml_document_canonicalize, -1); + rb_define_method(cNokogiriXmlDocument, "url", url, 0); + rb_define_method(cNokogiriXmlDocument, "create_entity", noko_xml_document__create_entity, -1); + rb_define_method(cNokogiriXmlDocument, "remove_namespaces!", remove_namespaces_bang, 0); + + rb_define_protected_method(cNokogiriXmlDocument, "initialize_copy_with_args", rb_xml_document_initialize_copy_with_args, + 2); +} diff --git a/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/xml_document_fragment.c b/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/xml_document_fragment.c new file mode 100644 index 000000000..3f28d28d4 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/xml_document_fragment.c @@ -0,0 +1,29 @@ +#include + +VALUE cNokogiriXmlDocumentFragment; + +/* :nodoc: */ +static VALUE +noko_xml_document_fragment_s_native_new(VALUE klass, VALUE rb_doc) +{ + xmlDocPtr c_doc; + xmlNodePtr c_node; + VALUE rb_node; + + c_doc = noko_xml_document_unwrap(rb_doc); + c_node = xmlNewDocFragment(c_doc->doc); + noko_xml_document_pin_node(c_node); + rb_node = noko_xml_node_wrap(klass, c_node); + + return rb_node; +} + +void +noko_init_xml_document_fragment(void) +{ + assert(cNokogiriXmlNode); + + cNokogiriXmlDocumentFragment = rb_define_class_under(mNokogiriXml, "DocumentFragment", cNokogiriXmlNode); + + rb_define_singleton_method(cNokogiriXmlDocumentFragment, "native_new", noko_xml_document_fragment_s_native_new, 1); +} diff --git a/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/xml_dtd.c b/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/xml_dtd.c new file mode 100644 index 000000000..d36102057 --- /dev/null 
+++ b/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/xml_dtd.c @@ -0,0 +1,208 @@ +#include + +VALUE cNokogiriXmlDtd; + +static void +notation_copier(void *c_notation_ptr, void *rb_hash_ptr, const xmlChar *name) +{ + VALUE rb_hash = (VALUE)rb_hash_ptr; + xmlNotationPtr c_notation = (xmlNotationPtr)c_notation_ptr; + VALUE rb_notation; + VALUE cNokogiriXmlNotation; + VALUE rb_constructor_args[3]; + + rb_constructor_args[0] = (c_notation->name ? NOKOGIRI_STR_NEW2(c_notation->name) : Qnil); + rb_constructor_args[1] = (c_notation->PublicID ? NOKOGIRI_STR_NEW2(c_notation->PublicID) : Qnil); + rb_constructor_args[2] = (c_notation->SystemID ? NOKOGIRI_STR_NEW2(c_notation->SystemID) : Qnil); + + cNokogiriXmlNotation = rb_const_get_at(mNokogiriXml, rb_intern("Notation")); + rb_notation = rb_class_new_instance(3, rb_constructor_args, cNokogiriXmlNotation); + + rb_hash_aset(rb_hash, NOKOGIRI_STR_NEW2(name), rb_notation); +} + +static void +element_copier(void *c_node_ptr, void *rb_hash_ptr, const xmlChar *c_name) +{ + VALUE rb_hash = (VALUE)rb_hash_ptr; + xmlNodePtr c_node = (xmlNodePtr)c_node_ptr; + + VALUE rb_node = noko_xml_node_wrap(Qnil, c_node); + + rb_hash_aset(rb_hash, NOKOGIRI_STR_NEW2(c_name), rb_node); +} + +/* + * call-seq: + * entities + * + * Get a hash of the elements for this DTD. + */ +static VALUE +entities(VALUE self) +{ + xmlDtdPtr dtd; + VALUE hash; + + Noko_Node_Get_Struct(self, xmlDtd, dtd); + + if (!dtd->entities) { return Qnil; } + + hash = rb_hash_new(); + + xmlHashScan((xmlHashTablePtr)dtd->entities, element_copier, (void *)hash); + + return hash; +} + +/* + * call-seq: + * notations() → Hash + * + * [Returns] All the notations for this DTD in a Hash of Notation +name+ to Notation. + */ +static VALUE +notations(VALUE self) +{ + xmlDtdPtr dtd; + VALUE hash; + + Noko_Node_Get_Struct(self, xmlDtd, dtd); + + if (!dtd->notations) { return Qnil; } + + hash = rb_hash_new(); + + xmlHashScan((xmlHashTablePtr)dtd->notations, notation_copier, (void *)hash); + + return hash; +} + +/* + * call-seq: + * attributes + * + * Get a hash of the attributes for this DTD. + */ +static VALUE +attributes(VALUE self) +{ + xmlDtdPtr dtd; + VALUE hash; + + Noko_Node_Get_Struct(self, xmlDtd, dtd); + + hash = rb_hash_new(); + + if (!dtd->attributes) { return hash; } + + xmlHashScan((xmlHashTablePtr)dtd->attributes, element_copier, (void *)hash); + + return hash; +} + +/* + * call-seq: + * elements + * + * Get a hash of the elements for this DTD. 
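+ *
+ * A minimal usage sketch (assuming the document declares an internal DTD, which is where
+ * these element declarations come from):
+ *
+ *   doc = Nokogiri::XML(<<~XML)
+ *     <!DOCTYPE root [ <!ELEMENT root (#PCDATA)> ]>
+ *     <root/>
+ *   XML
+ *   dtd = doc.internal_subset   # => the Nokogiri::XML::DTD for this document
+ *   dtd.elements.keys           # => ["root"]
+ *   dtd.elements["root"]        # => a Nokogiri::XML::ElementDecl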
+ */ +static VALUE +elements(VALUE self) +{ + xmlDtdPtr dtd; + VALUE hash; + + Noko_Node_Get_Struct(self, xmlDtd, dtd); + + if (!dtd->elements) { return Qnil; } + + hash = rb_hash_new(); + + xmlHashScan((xmlHashTablePtr)dtd->elements, element_copier, (void *)hash); + + return hash; +} + +/* + * call-seq: + * validate(document) + * + * Validate +document+ returning a list of errors + */ +static VALUE +validate(VALUE self, VALUE document) +{ + xmlDocPtr doc; + xmlDtdPtr dtd; + xmlValidCtxtPtr ctxt; + VALUE error_list; + + Noko_Node_Get_Struct(self, xmlDtd, dtd); + doc = noko_xml_document_unwrap(document); + error_list = rb_ary_new(); + + ctxt = xmlNewValidCtxt(); + + xmlSetStructuredErrorFunc((void *)error_list, noko__error_array_pusher); + + xmlValidateDtd(ctxt, doc, dtd); + + xmlSetStructuredErrorFunc(NULL, NULL); + + xmlFreeValidCtxt(ctxt); + + return error_list; +} + +/* + * call-seq: + * system_id + * + * Get the System ID for this DTD + */ +static VALUE +system_id(VALUE self) +{ + xmlDtdPtr dtd; + Noko_Node_Get_Struct(self, xmlDtd, dtd); + + if (!dtd->SystemID) { return Qnil; } + + return NOKOGIRI_STR_NEW2(dtd->SystemID); +} + +/* + * call-seq: + * external_id + * + * Get the External ID for this DTD + */ +static VALUE +external_id(VALUE self) +{ + xmlDtdPtr dtd; + Noko_Node_Get_Struct(self, xmlDtd, dtd); + + if (!dtd->ExternalID) { return Qnil; } + + return NOKOGIRI_STR_NEW2(dtd->ExternalID); +} + +void +noko_init_xml_dtd(void) +{ + assert(cNokogiriXmlNode); + /* + * Nokogiri::XML::DTD wraps DTD nodes in an XML document + */ + cNokogiriXmlDtd = rb_define_class_under(mNokogiriXml, "DTD", cNokogiriXmlNode); + + rb_define_method(cNokogiriXmlDtd, "notations", notations, 0); + rb_define_method(cNokogiriXmlDtd, "elements", elements, 0); + rb_define_method(cNokogiriXmlDtd, "entities", entities, 0); + rb_define_method(cNokogiriXmlDtd, "validate", validate, 1); + rb_define_method(cNokogiriXmlDtd, "attributes", attributes, 0); + rb_define_method(cNokogiriXmlDtd, "system_id", system_id, 0); + rb_define_method(cNokogiriXmlDtd, "external_id", external_id, 0); +} diff --git a/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/xml_element_content.c b/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/xml_element_content.c new file mode 100644 index 000000000..b4fa884a5 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/xml_element_content.c @@ -0,0 +1,131 @@ +#include + +VALUE cNokogiriXmlElementContent; + +static const rb_data_type_t xml_element_content_type = { + .wrap_struct_name = "xmlElementContent", + .flags = RUBY_TYPED_FREE_IMMEDIATELY | RUBY_TYPED_WB_PROTECTED, +}; + +/* + * call-seq: + * name → String + * + * [Returns] The content element's +name+ + */ +static VALUE +get_name(VALUE self) +{ + xmlElementContentPtr elem; + TypedData_Get_Struct(self, xmlElementContent, &xml_element_content_type, elem); + + if (!elem->name) { return Qnil; } + return NOKOGIRI_STR_NEW2(elem->name); +} + +/* + * call-seq: + * type → Integer + * + * [Returns] The content element's +type+. Possible values are +PCDATA+, +ELEMENT+, +SEQ+, or +OR+. + */ +static VALUE +get_type(VALUE self) +{ + xmlElementContentPtr elem; + TypedData_Get_Struct(self, xmlElementContent, &xml_element_content_type, elem); + + return INT2NUM(elem->type); +} + +/* + * Get the first child. 
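+ *
+ * (libxml2 stores an element's declared content model as a binary tree: +SEQ+ and +OR+
+ * nodes carry their operands in +c1+ and +c2+, while +PCDATA+ and +ELEMENT+ nodes are
+ * leaves. These private readers are what the Ruby-level ElementContent#children helper
+ * is built on.)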
+ */ +static VALUE +get_c1(VALUE self) +{ + xmlElementContentPtr elem; + TypedData_Get_Struct(self, xmlElementContent, &xml_element_content_type, elem); + + if (!elem->c1) { return Qnil; } + return noko_xml_element_content_wrap(rb_iv_get(self, "@document"), elem->c1); +} + +/* + * Get the second child. + */ +static VALUE +get_c2(VALUE self) +{ + xmlElementContentPtr elem; + TypedData_Get_Struct(self, xmlElementContent, &xml_element_content_type, elem); + + if (!elem->c2) { return Qnil; } + return noko_xml_element_content_wrap(rb_iv_get(self, "@document"), elem->c2); +} + +/* + * call-seq: + * occur → Integer + * + * [Returns] The content element's +occur+ flag. Possible values are +ONCE+, +OPT+, +MULT+ or +PLUS+. + */ +static VALUE +get_occur(VALUE self) +{ + xmlElementContentPtr elem; + TypedData_Get_Struct(self, xmlElementContent, &xml_element_content_type, elem); + + return INT2NUM(elem->ocur); +} + +/* + * call-seq: + * prefix → String + * + * [Returns] The content element's namespace +prefix+. + */ +static VALUE +get_prefix(VALUE self) +{ + xmlElementContentPtr elem; + TypedData_Get_Struct(self, xmlElementContent, &xml_element_content_type, elem); + + if (!elem->prefix) { return Qnil; } + + return NOKOGIRI_STR_NEW2(elem->prefix); +} + +/* + * create a Nokogiri::XML::ElementContent object around an +element+. + */ +VALUE +noko_xml_element_content_wrap(VALUE rb_document, xmlElementContentPtr c_element_content) +{ + VALUE elem = TypedData_Wrap_Struct( + cNokogiriXmlElementContent, + &xml_element_content_type, + c_element_content + ); + + /* keep a handle on the document for GC marking */ + rb_iv_set(elem, "@document", rb_document); + + return elem; +} + +void +noko_init_xml_element_content(void) +{ + cNokogiriXmlElementContent = rb_define_class_under(mNokogiriXml, "ElementContent", rb_cObject); + + rb_undef_alloc_func(cNokogiriXmlElementContent); + + rb_define_method(cNokogiriXmlElementContent, "name", get_name, 0); + rb_define_method(cNokogiriXmlElementContent, "type", get_type, 0); + rb_define_method(cNokogiriXmlElementContent, "occur", get_occur, 0); + rb_define_method(cNokogiriXmlElementContent, "prefix", get_prefix, 0); + + rb_define_private_method(cNokogiriXmlElementContent, "c1", get_c1, 0); + rb_define_private_method(cNokogiriXmlElementContent, "c2", get_c2, 0); +} diff --git a/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/xml_element_decl.c b/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/xml_element_decl.c new file mode 100644 index 000000000..58981d355 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/xml_element_decl.c @@ -0,0 +1,69 @@ +#include + +VALUE cNokogiriXmlElementDecl; + +static ID id_document; + +/* + * call-seq: + * element_type → Integer + * + * The element_type + */ +static VALUE +element_type(VALUE self) +{ + xmlElementPtr node; + Noko_Node_Get_Struct(self, xmlElement, node); + return INT2NUM(node->etype); +} + +/* + * call-seq: + * content → Nokogiri::XML::ElementContent + * + * [Returns] The root of this element declaration's content tree. + */ +static VALUE +content(VALUE self) +{ + xmlElementPtr node; + Noko_Node_Get_Struct(self, xmlElement, node); + + if (!node->content) { return Qnil; } + + return noko_xml_element_content_wrap( + rb_funcall(self, id_document, 0), + node->content + ); +} + +/* + * call-seq: + * prefix → String + * + * [Returns] The namespace +prefix+ for this element declaration. 
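+ *
+ * A short usage sketch (a hypothetical document whose internal DTD declares a +root+
+ * element; the exact integer returned by #element_type comes from libxml2's
+ * xmlElementTypeVal enum):
+ *
+ *   doc  = Nokogiri::XML("<!DOCTYPE root [ <!ELEMENT root (#PCDATA)> ]><root/>")
+ *   decl = doc.internal_subset.elements["root"]
+ *   decl.element_type  # => an Integer describing the declaration (EMPTY, ANY, MIXED, ...)
+ *   decl.content       # => the root Nokogiri::XML::ElementContent of the content tree
+ *   decl.prefix        # => nil, since the declared name has no namespace prefix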
+ */ +static VALUE +prefix(VALUE self) +{ + xmlElementPtr node; + Noko_Node_Get_Struct(self, xmlElement, node); + + if (!node->prefix) { return Qnil; } + + return NOKOGIRI_STR_NEW2(node->prefix); +} + +void +noko_init_xml_element_decl(void) +{ + assert(cNokogiriXmlNode); + cNokogiriXmlElementDecl = rb_define_class_under(mNokogiriXml, "ElementDecl", cNokogiriXmlNode); + + rb_define_method(cNokogiriXmlElementDecl, "element_type", element_type, 0); + rb_define_method(cNokogiriXmlElementDecl, "content", content, 0); + rb_define_method(cNokogiriXmlElementDecl, "prefix", prefix, 0); + + id_document = rb_intern("document"); +} diff --git a/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/xml_encoding_handler.c b/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/xml_encoding_handler.c new file mode 100644 index 000000000..099278775 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/xml_encoding_handler.c @@ -0,0 +1,112 @@ +#include + +VALUE cNokogiriEncodingHandler; + +static void +xml_encoding_handler_dealloc(void *data) +{ + /* make sure iconv handlers are cleaned up and freed */ + xmlCharEncodingHandlerPtr c_handler = data; + xmlCharEncCloseFunc(c_handler); +} + +static const rb_data_type_t xml_char_encoding_handler_type = { + .wrap_struct_name = "xmlCharEncodingHandler", + .function = { + .dfree = xml_encoding_handler_dealloc, + }, + .flags = RUBY_TYPED_FREE_IMMEDIATELY | RUBY_TYPED_WB_PROTECTED, +}; + + +/* + * call-seq: Nokogiri::EncodingHandler.[](name) + * + * Get the encoding handler for +name+ + */ +static VALUE +rb_xml_encoding_handler_s_get(VALUE klass, VALUE key) +{ + xmlCharEncodingHandlerPtr handler; + + handler = xmlFindCharEncodingHandler(StringValueCStr(key)); + if (handler) { + return TypedData_Wrap_Struct(klass, &xml_char_encoding_handler_type, handler); + } + + return Qnil; +} + + +/* + * call-seq: Nokogiri::EncodingHandler.delete(name) + * + * Delete the encoding alias named +name+ + */ +static VALUE +rb_xml_encoding_handler_s_delete(VALUE klass, VALUE name) +{ + if (xmlDelEncodingAlias(StringValueCStr(name))) { return Qnil; } + + return Qtrue; +} + + +/* + * call-seq: Nokogiri::EncodingHandler.alias(real_name, alias_name) + * + * Alias encoding handler with name +real_name+ to name +alias_name+ + */ +static VALUE +rb_xml_encoding_handler_s_alias(VALUE klass, VALUE from, VALUE to) +{ + xmlAddEncodingAlias(StringValueCStr(from), StringValueCStr(to)); + + return to; +} + + +/* + * call-seq: Nokogiri::EncodingHandler.clear_aliases! + * + * Remove all encoding aliases. 
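+ *
+ * A minimal usage sketch (the alias name below is invented for illustration):
+ *
+ *   Nokogiri::EncodingHandler.alias("UTF-8", "my-utf-8")
+ *   Nokogiri::EncodingHandler["my-utf-8"]     # => an EncodingHandler for UTF-8
+ *   Nokogiri::EncodingHandler.clear_aliases!
+ *   Nokogiri::EncodingHandler["my-utf-8"]     # => nil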
+ */ +static VALUE +rb_xml_encoding_handler_s_clear_aliases(VALUE klass) +{ + xmlCleanupEncodingAliases(); + + return klass; +} + + +/* + * call-seq: name + * + * Get the name of this EncodingHandler + */ +static VALUE +rb_xml_encoding_handler_name(VALUE self) +{ + xmlCharEncodingHandlerPtr handler; + + TypedData_Get_Struct(self, xmlCharEncodingHandler, &xml_char_encoding_handler_type, handler); + + return NOKOGIRI_STR_NEW2(handler->name); +} + + +void +noko_init_xml_encoding_handler(void) +{ + cNokogiriEncodingHandler = rb_define_class_under(mNokogiri, "EncodingHandler", rb_cObject); + + rb_undef_alloc_func(cNokogiriEncodingHandler); + + rb_define_singleton_method(cNokogiriEncodingHandler, "[]", rb_xml_encoding_handler_s_get, 1); + rb_define_singleton_method(cNokogiriEncodingHandler, "delete", rb_xml_encoding_handler_s_delete, 1); + rb_define_singleton_method(cNokogiriEncodingHandler, "alias", rb_xml_encoding_handler_s_alias, 2); + rb_define_singleton_method(cNokogiriEncodingHandler, "clear_aliases!", rb_xml_encoding_handler_s_clear_aliases, 0); + + rb_define_method(cNokogiriEncodingHandler, "name", rb_xml_encoding_handler_name, 0); +} diff --git a/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/xml_entity_decl.c b/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/xml_entity_decl.c new file mode 100644 index 000000000..4b7f40783 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/xml_entity_decl.c @@ -0,0 +1,112 @@ +#include + +VALUE cNokogiriXmlEntityDecl; + +/* + * call-seq: + * original_content + * + * Get the original_content before ref substitution + */ +static VALUE +original_content(VALUE self) +{ + xmlEntityPtr node; + Noko_Node_Get_Struct(self, xmlEntity, node); + + if (!node->orig) { return Qnil; } + + return NOKOGIRI_STR_NEW2(node->orig); +} + +/* + * call-seq: + * content + * + * Get the content + */ +static VALUE +get_content(VALUE self) +{ + xmlEntityPtr node; + Noko_Node_Get_Struct(self, xmlEntity, node); + + if (!node->content) { return Qnil; } + + return NOKOGIRI_STR_NEW(node->content, node->length); +} + +/* + * call-seq: + * entity_type + * + * Get the entity type + */ +static VALUE +entity_type(VALUE self) +{ + xmlEntityPtr node; + Noko_Node_Get_Struct(self, xmlEntity, node); + + return INT2NUM((int)node->etype); +} + +/* + * call-seq: + * external_id + * + * Get the external identifier for PUBLIC + */ +static VALUE +external_id(VALUE self) +{ + xmlEntityPtr node; + Noko_Node_Get_Struct(self, xmlEntity, node); + + if (!node->ExternalID) { return Qnil; } + + return NOKOGIRI_STR_NEW2(node->ExternalID); +} + +/* + * call-seq: + * system_id + * + * Get the URI for a SYSTEM or PUBLIC Entity + */ +static VALUE +system_id(VALUE self) +{ + xmlEntityPtr node; + Noko_Node_Get_Struct(self, xmlEntity, node); + + if (!node->SystemID) { return Qnil; } + + return NOKOGIRI_STR_NEW2(node->SystemID); +} + +void +noko_init_xml_entity_decl(void) +{ + assert(cNokogiriXmlNode); + cNokogiriXmlEntityDecl = rb_define_class_under(mNokogiriXml, "EntityDecl", cNokogiriXmlNode); + + rb_define_method(cNokogiriXmlEntityDecl, "original_content", original_content, 0); + rb_define_method(cNokogiriXmlEntityDecl, "content", get_content, 0); + rb_define_method(cNokogiriXmlEntityDecl, "entity_type", entity_type, 0); + rb_define_method(cNokogiriXmlEntityDecl, "external_id", external_id, 0); + rb_define_method(cNokogiriXmlEntityDecl, "system_id", system_id, 0); + + rb_const_set(cNokogiriXmlEntityDecl, 
rb_intern("INTERNAL_GENERAL"), + INT2NUM(XML_INTERNAL_GENERAL_ENTITY)); + rb_const_set(cNokogiriXmlEntityDecl, rb_intern("EXTERNAL_GENERAL_PARSED"), + INT2NUM(XML_EXTERNAL_GENERAL_PARSED_ENTITY)); + rb_const_set(cNokogiriXmlEntityDecl, rb_intern("EXTERNAL_GENERAL_UNPARSED"), + INT2NUM(XML_EXTERNAL_GENERAL_UNPARSED_ENTITY)); + rb_const_set(cNokogiriXmlEntityDecl, rb_intern("INTERNAL_PARAMETER"), + INT2NUM(XML_INTERNAL_PARAMETER_ENTITY)); + rb_const_set(cNokogiriXmlEntityDecl, rb_intern("EXTERNAL_PARAMETER"), + INT2NUM(XML_EXTERNAL_PARAMETER_ENTITY)); + rb_const_set(cNokogiriXmlEntityDecl, rb_intern("INTERNAL_PREDEFINED"), + INT2NUM(XML_INTERNAL_PREDEFINED_ENTITY)); +} diff --git a/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/xml_entity_reference.c b/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/xml_entity_reference.c new file mode 100644 index 000000000..3fcc3e547 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/xml_entity_reference.c @@ -0,0 +1,50 @@ +#include + +VALUE cNokogiriXmlEntityReference; + +/* + * call-seq: + * new(document, content) + * + * Create a new EntityReference element on the +document+ with +name+ + */ +static VALUE +new (int argc, VALUE *argv, VALUE klass) +{ + xmlDocPtr xml_doc; + xmlNodePtr node; + VALUE document; + VALUE name; + VALUE rest; + VALUE rb_node; + + rb_scan_args(argc, argv, "2*", &document, &name, &rest); + + xml_doc = noko_xml_document_unwrap(document); + + node = xmlNewReference( + xml_doc, + (const xmlChar *)StringValueCStr(name) + ); + + noko_xml_document_pin_node(node); + + rb_node = noko_xml_node_wrap(klass, node); + rb_obj_call_init(rb_node, argc, argv); + + if (rb_block_given_p()) { rb_yield(rb_node); } + + return rb_node; +} + +void +noko_init_xml_entity_reference(void) +{ + assert(cNokogiriXmlNode); + /* + * EntityReference represents an EntityReference node in an xml document. + */ + cNokogiriXmlEntityReference = rb_define_class_under(mNokogiriXml, "EntityReference", cNokogiriXmlNode); + + rb_define_singleton_method(cNokogiriXmlEntityReference, "new", new, -1); +} diff --git a/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/xml_namespace.c b/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/xml_namespace.c new file mode 100644 index 000000000..b16ad4553 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/xml_namespace.c @@ -0,0 +1,181 @@ +#include + +/* + * The lifecycle of a Namespace node is more complicated than other Nodes, for two reasons: + * + * 1. the underlying C structure has a different layout than all the other node structs, with the + * `_private` member where we store a pointer to Ruby object data not being in first position. + * 2. xmlNs structures returned in an xmlNodeset from an XPath query are copies of the document's + * namespaces, and so do not share the same memory lifecycle as everything else in a document. + * + * As a result of 1, you may see special handling of XML_NAMESPACE_DECL node types throughout the + * Nokogiri C code, though I intend to wrap up that logic in ruby_object_{get,set} functions + * shortly. + * + * As a result of 2, you will see we have special handling in this file and in xml_node_set.c to + * carefully manage the memory lifecycle of xmlNs structs to match the Ruby object's GC + * lifecycle. 
In xml_node_set.c we have local versions of xmlXPathNodeSetDel() and + * xmlXPathFreeNodeSet() that avoid freeing xmlNs structs in the node set. In this file, we decide + * whether or not to call dealloc_namespace() depending on whether the xmlNs struct appears to be + * in an xmlNodeSet (and thus the result of an XPath query) or not. + * + * Yes, this is madness. + */ + +VALUE cNokogiriXmlNamespace ; + +static void +_xml_namespace_dealloc(void *ptr) +{ + /* + * this deallocator is only used for namespace nodes that are part of an xpath + * node set. see noko_xml_namespace_wrap(). + */ + xmlNsPtr ns = ptr; + + if (ns->href) { + xmlFree(DISCARD_CONST_QUAL_XMLCHAR(ns->href)); + } + if (ns->prefix) { + xmlFree(DISCARD_CONST_QUAL_XMLCHAR(ns->prefix)); + } + xmlFree(ns); +} + +static void +_xml_namespace_update_references(void *ptr) +{ + xmlNsPtr ns = ptr; + if (ns->_private) { + ns->_private = (void *)rb_gc_location((VALUE)ns->_private); + } +} + +static const rb_data_type_t xml_ns_type_with_free = { + .wrap_struct_name = "xmlNs (with free)", + .function = { + .dfree = _xml_namespace_dealloc, + .dcompact = _xml_namespace_update_references, + }, + .flags = RUBY_TYPED_FREE_IMMEDIATELY | RUBY_TYPED_WB_PROTECTED, +}; + +static const rb_data_type_t xml_ns_type_without_free = { + .wrap_struct_name = "xmlNs (without free)", + .function = { + .dcompact = _xml_namespace_update_references, + }, + .flags = RUBY_TYPED_FREE_IMMEDIATELY | RUBY_TYPED_WB_PROTECTED, +}; + +/* + * :call-seq: + * prefix() → String or nil + * + * Return the prefix for this Namespace, or +nil+ if there is no prefix (e.g., default namespace). + * + * *Example* + * + * doc = Nokogiri::XML.parse(<<~XML) + * + * + * + * + * + * XML + * + * doc.root.elements.first.namespace.prefix + * # => nil + * + * doc.root.elements.last.namespace.prefix + * # => "noko" + */ +static VALUE +prefix(VALUE self) +{ + xmlNsPtr ns; + + Noko_Namespace_Get_Struct(self, xmlNs, ns); + if (!ns->prefix) { return Qnil; } + + return NOKOGIRI_STR_NEW2(ns->prefix); +} + +/* + * :call-seq: + * href() → String + * + * Returns the URI reference for this Namespace. 
+ * + * *Example* + * + * doc = Nokogiri::XML.parse(<<~XML) + * + * + * + * + * + * XML + * + * doc.root.elements.first.namespace.href + * # => "http://nokogiri.org/ns/default" + * + * doc.root.elements.last.namespace.href + * # => "http://nokogiri.org/ns/noko" + */ +static VALUE +href(VALUE self) +{ + xmlNsPtr ns; + + Noko_Namespace_Get_Struct(self, xmlNs, ns); + if (!ns->href) { return Qnil; } + + return NOKOGIRI_STR_NEW2(ns->href); +} + +VALUE +noko_xml_namespace_wrap(xmlNsPtr c_namespace, xmlDocPtr c_document) +{ + VALUE rb_namespace; + + if (c_namespace->_private) { + return (VALUE)c_namespace->_private; + } + + if (c_document) { + rb_namespace = TypedData_Wrap_Struct(cNokogiriXmlNamespace, + &xml_ns_type_without_free, + c_namespace); + + if (DOC_RUBY_OBJECT_TEST(c_document)) { + rb_iv_set(rb_namespace, "@document", DOC_RUBY_OBJECT(c_document)); + rb_ary_push(DOC_NODE_CACHE(c_document), rb_namespace); + } + } else { + rb_namespace = TypedData_Wrap_Struct(cNokogiriXmlNamespace, + &xml_ns_type_with_free, + c_namespace); + } + + c_namespace->_private = (void *)rb_namespace; + + return rb_namespace; +} + +VALUE +noko_xml_namespace_wrap_xpath_copy(xmlNsPtr c_namespace) +{ + return noko_xml_namespace_wrap(c_namespace, NULL); +} + +void +noko_init_xml_namespace(void) +{ + cNokogiriXmlNamespace = rb_define_class_under(mNokogiriXml, "Namespace", rb_cObject); + + rb_undef_alloc_func(cNokogiriXmlNamespace); + + rb_define_method(cNokogiriXmlNamespace, "prefix", prefix, 0); + rb_define_method(cNokogiriXmlNamespace, "href", href, 0); +} diff --git a/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/xml_node.c b/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/xml_node.c new file mode 100644 index 000000000..111e8f7ec --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/xml_node.c @@ -0,0 +1,2459 @@ +#include + +#include + +// :stopdoc: + +VALUE cNokogiriXmlNode ; +static ID id_decorate, id_decorate_bang; + +typedef xmlNodePtr(*pivot_reparentee_func)(xmlNodePtr, xmlNodePtr); + +static void +_xml_node_mark(void *ptr) +{ + xmlNodePtr node = ptr; + + if (!DOC_RUBY_OBJECT_TEST(node->doc)) { + return; + } + + xmlDocPtr doc = node->doc; + if (doc->type == XML_DOCUMENT_NODE || doc->type == XML_HTML_DOCUMENT_NODE) { + if (DOC_RUBY_OBJECT_TEST(doc)) { + rb_gc_mark(DOC_RUBY_OBJECT(doc)); + } + } else if (node->doc->_private) { + rb_gc_mark((VALUE)doc->_private); + } +} + +static void +_xml_node_update_references(void *ptr) +{ + xmlNodePtr node = ptr; + + if (node->_private) { + node->_private = (void *)rb_gc_location((VALUE)node->_private); + } +} + +static const rb_data_type_t xml_node_type = { + .wrap_struct_name = "xmlNode", + .function = { + .dmark = _xml_node_mark, + .dcompact = _xml_node_update_references, + }, + .flags = RUBY_TYPED_FREE_IMMEDIATELY, +}; + +static VALUE +_xml_node_alloc(VALUE klass) +{ + return TypedData_Wrap_Struct(klass, &xml_node_type, NULL); +} + +static void +_xml_node_data_ptr_set(VALUE rb_node, xmlNodePtr c_node) +{ + assert(DATA_PTR(rb_node) == NULL); + assert(c_node->_private == NULL); + + DATA_PTR(rb_node) = c_node; + c_node->_private = (void *)rb_node; + + return; +} + +static void +relink_namespace(xmlNodePtr reparented) +{ + xmlNodePtr child; + xmlAttrPtr attr; + + if (reparented->type != XML_ATTRIBUTE_NODE && + reparented->type != XML_ELEMENT_NODE) { return; } + + if (reparented->ns == NULL || reparented->ns->prefix == NULL) { + xmlNsPtr ns = NULL; + xmlChar *name = NULL, 
*prefix = NULL; + + name = xmlSplitQName2(reparented->name, &prefix); + + if (reparented->type == XML_ATTRIBUTE_NODE) { + if (prefix == NULL || strcmp((char *)prefix, XMLNS_PREFIX) == 0) { + xmlFree(name); + xmlFree(prefix); + return; + } + } + + ns = xmlSearchNs(reparented->doc, reparented, prefix); + + if (ns != NULL) { + xmlNodeSetName(reparented, name); + xmlSetNs(reparented, ns); + } + + xmlFree(name); + xmlFree(prefix); + } + + /* Avoid segv when relinking against unlinked nodes. */ + if (reparented->type != XML_ELEMENT_NODE || !reparented->parent) { return; } + + /* Make sure that our reparented node has the correct namespaces */ + if (!reparented->ns && + (reparented->doc != (xmlDocPtr)reparented->parent) && + (rb_iv_get(DOC_RUBY_OBJECT(reparented->doc), "@namespace_inheritance") == Qtrue)) { + xmlSetNs(reparented, reparented->parent->ns); + } + + /* Search our parents for an existing definition */ + if (reparented->nsDef) { + xmlNsPtr curr = reparented->nsDef; + xmlNsPtr prev = NULL; + + while (curr) { + xmlNsPtr ns = xmlSearchNsByHref( + reparented->doc, + reparented->parent, + curr->href + ); + /* If we find the namespace is already declared, remove it from this + * definition list. */ + if (ns && ns != curr && xmlStrEqual(ns->prefix, curr->prefix)) { + if (prev) { + prev->next = curr->next; + } else { + reparented->nsDef = curr->next; + } + noko_xml_document_pin_namespace(curr, reparented->doc); + } else { + prev = curr; + } + curr = curr->next; + } + } + + /* + * Search our parents for an existing definition of current namespace, + * because the definition it's pointing to may have just been removed nsDef. + * + * And although that would technically probably be OK, I'd feel better if we + * referred to a namespace that's still present in a node's nsDef somewhere + * in the doc. + */ + if (reparented->ns) { + xmlNsPtr ns = xmlSearchNs(reparented->doc, reparented, reparented->ns->prefix); + if (ns + && ns != reparented->ns + && xmlStrEqual(ns->prefix, reparented->ns->prefix) + && xmlStrEqual(ns->href, reparented->ns->href) + ) { + xmlSetNs(reparented, ns); + } + } + + /* Only walk all children if there actually is a namespace we need to */ + /* reparent. */ + if (NULL == reparented->ns) { return; } + + /* When a node gets reparented, walk its children to make sure that */ + /* their namespaces are reparented as well. 
*/ + child = reparented->children; + while (NULL != child) { + relink_namespace(child); + child = child->next; + } + + if (reparented->type == XML_ELEMENT_NODE) { + attr = reparented->properties; + while (NULL != attr) { + relink_namespace((xmlNodePtr)attr); + attr = attr->next; + } + } +} + + +/* internal function meant to wrap xmlReplaceNode + and fix some issues we have with libxml2 merging nodes */ +static xmlNodePtr +xmlReplaceNodeWrapper(xmlNodePtr pivot, xmlNodePtr new_node) +{ + xmlNodePtr retval ; + + retval = xmlReplaceNode(pivot, new_node) ; + + if (retval == pivot) { + retval = new_node ; /* return semantics for reparent_node_with */ + } + + /* work around libxml2 issue: https://bugzilla.gnome.org/show_bug.cgi?id=615612 */ + if (retval && retval->type == XML_TEXT_NODE) { + if (retval->prev && retval->prev->type == XML_TEXT_NODE) { + retval = xmlTextMerge(retval->prev, retval); + } + if (retval->next && retval->next->type == XML_TEXT_NODE) { + retval = xmlTextMerge(retval, retval->next); + } + } + + return retval ; +} + + +static void +raise_if_ancestor_of_self(xmlNodePtr self) +{ + for (xmlNodePtr ancestor = self->parent ; ancestor ; ancestor = ancestor->parent) { + if (self == ancestor) { + rb_raise(rb_eRuntimeError, "cycle detected: node '%s' is an ancestor of itself", self->name); + } + } +} + + +static VALUE +reparent_node_with(VALUE pivot_obj, VALUE reparentee_obj, pivot_reparentee_func prf) +{ + VALUE reparented_obj ; + xmlNodePtr reparentee, original_reparentee, pivot, reparented, next_text, new_next_text, parent ; + int original_ns_prefix_is_default = 0 ; + + if (!rb_obj_is_kind_of(reparentee_obj, cNokogiriXmlNode)) { + rb_raise(rb_eArgError, "node must be a Nokogiri::XML::Node"); + } + if (rb_obj_is_kind_of(reparentee_obj, cNokogiriXmlDocument)) { + rb_raise(rb_eArgError, "node must be a Nokogiri::XML::Node"); + } + + Noko_Node_Get_Struct(reparentee_obj, xmlNode, reparentee); + Noko_Node_Get_Struct(pivot_obj, xmlNode, pivot); + + /* + * Check if nodes given are appropriate to have a parent-child + * relationship, based on the DOM specification. + * + * cf. http://www.w3.org/TR/2004/REC-DOM-Level-3-Core-20040407/core.html#ID-1590626202 + */ + if (prf == xmlAddChild) { + parent = pivot; + } else { + parent = pivot->parent; + } + + if (parent) { + switch (parent->type) { + case XML_DOCUMENT_NODE: + case XML_HTML_DOCUMENT_NODE: + switch (reparentee->type) { + case XML_ELEMENT_NODE: + case XML_PI_NODE: + case XML_COMMENT_NODE: + case XML_DOCUMENT_TYPE_NODE: + /* + * The DOM specification says no to adding text-like nodes + * directly to a document, but we allow it for compatibility. + */ + case XML_TEXT_NODE: + case XML_CDATA_SECTION_NODE: + case XML_ENTITY_REF_NODE: + goto ok; + default: + break; + } + break; + case XML_DOCUMENT_FRAG_NODE: + case XML_ENTITY_REF_NODE: + case XML_ELEMENT_NODE: + switch (reparentee->type) { + case XML_ELEMENT_NODE: + case XML_PI_NODE: + case XML_COMMENT_NODE: + case XML_TEXT_NODE: + case XML_CDATA_SECTION_NODE: + case XML_ENTITY_REF_NODE: + goto ok; + default: + break; + } + break; + case XML_ATTRIBUTE_NODE: + switch (reparentee->type) { + case XML_TEXT_NODE: + case XML_ENTITY_REF_NODE: + goto ok; + default: + break; + } + break; + case XML_TEXT_NODE: + /* + * xmlAddChild() breaks the DOM specification in that it allows + * adding a text node to another, in which case text nodes are + * coalesced, but since our JRuby version does not support such + * operation, we should inhibit it. 
+ */ + break; + default: + break; + } + + rb_raise(rb_eArgError, "cannot reparent %s there", rb_obj_classname(reparentee_obj)); + } + +ok: + original_reparentee = reparentee; + + if (reparentee->doc != pivot->doc || reparentee->type == XML_TEXT_NODE) { + /* + * if the reparentee is a text node, there's a very good chance it will be + * merged with an adjacent text node after being reparented, and in that case + * libxml will free the underlying C struct. + * + * since we clearly have a ruby object which references the underlying + * memory, we can't let the C struct get freed. let's pickle the original + * reparentee by rooting it; and then we'll reparent a duplicate of the + * node that we don't care about preserving. + * + * alternatively, if the reparentee is from a different document than the + * pivot node, libxml2 is going to get confused about which document's + * "dictionary" the node's strings belong to (this is an otherwise + * uninteresting libxml2 implementation detail). as a result, we cannot + * reparent the actual reparentee, so we reparent a duplicate. + */ + if (reparentee->type == XML_TEXT_NODE && reparentee->_private) { + /* + * additionally, since we know this C struct isn't going to be related to + * a Ruby object anymore, let's break the relationship on this end as + * well. + * + * this is not absolutely necessary unless libxml-ruby is also in effect, + * in which case its global callback `rxml_node_deregisterNode` will try + * to do things to our data. + * + * for more details on this particular (and particularly nasty) edge + * case, see: + * + * https://github.com/sparklemotion/nokogiri/issues/1426 + */ + reparentee->_private = NULL ; + } + + if (reparentee->ns != NULL && reparentee->ns->prefix == NULL) { + original_ns_prefix_is_default = 1; + } + + noko_xml_document_pin_node(reparentee); + + if (!(reparentee = xmlDocCopyNode(reparentee, pivot->doc, 1))) { + rb_raise(rb_eRuntimeError, "Could not reparent node (xmlDocCopyNode)"); + } + + if (original_ns_prefix_is_default && reparentee->ns != NULL && reparentee->ns->prefix != NULL) { + /* + * issue #391, where new node's prefix may become the string "default" + * see libxml2 tree.c xmlNewReconciliedNs which implements this behavior. + */ + xmlFree(DISCARD_CONST_QUAL_XMLCHAR(reparentee->ns->prefix)); + reparentee->ns->prefix = NULL; + } + } + + xmlUnlinkNode(original_reparentee); + + if (prf != xmlAddPrevSibling && prf != xmlAddNextSibling && prf != xmlAddChild + && reparentee->type == XML_TEXT_NODE && pivot->next && pivot->next->type == XML_TEXT_NODE) { + /* + * libxml merges text nodes in a right-to-left fashion, meaning that if + * there are two text nodes who would be adjacent, the right (or following, + * or next) node will be merged into the left (or preceding, or previous) + * node. + * + * and by "merged" I mean the string contents will be concatenated onto the + * left node's contents, and then the node will be freed. + * + * which means that if we have a ruby object wrapped around the right node, + * its memory would be freed out from under it. + * + * so, we detect this edge case and unlink-and-root the text node before it gets + * merged. then we dup the node and insert that duplicate back into the + * document where the real node was. + * + * yes, this is totally lame. 
+ */ + next_text = pivot->next ; + new_next_text = xmlDocCopyNode(next_text, pivot->doc, 1) ; + + xmlUnlinkNode(next_text); + noko_xml_document_pin_node(next_text); + + xmlAddNextSibling(pivot, new_next_text); + } + + if (!(reparented = (*prf)(pivot, reparentee))) { + rb_raise(rb_eRuntimeError, "Could not reparent node"); + } + + /* + * make sure the ruby object is pointed at the just-reparented node, which + * might be a duplicate (see above) or might be the result of merging + * adjacent text nodes. + */ + DATA_PTR(reparentee_obj) = reparented ; + reparented_obj = noko_xml_node_wrap(Qnil, reparented); + + rb_funcall(reparented_obj, id_decorate_bang, 0); + + /* if we've created a cycle, raise an exception */ + raise_if_ancestor_of_self(reparented); + + relink_namespace(reparented); + + return reparented_obj ; +} + +// :startdoc: + +/* + * :call-seq: + * add_namespace_definition(prefix, href) → Nokogiri::XML::Namespace + * add_namespace(prefix, href) → Nokogiri::XML::Namespace + * + * :category: Manipulating Document Structure + * + * Adds a namespace definition to this node with +prefix+ using +href+ value, as if this node had + * included an attribute "xmlns:prefix=href". + * + * A default namespace definition for this node can be added by passing +nil+ for +prefix+. + * + * [Parameters] + * - +prefix+ (String, +nil+) An {XML Name}[https://www.w3.org/TR/xml-names/#ns-decl] + * - +href+ (String) The {URI reference}[https://www.w3.org/TR/xml-names/#sec-namespaces] + * + * [Returns] The new Nokogiri::XML::Namespace + * + * *Example:* adding a non-default namespace definition + * + * doc = Nokogiri::XML("") + * inventory = doc.at_css("inventory") + * inventory.add_namespace_definition("automobile", "http://alices-autos.com/") + * inventory.add_namespace_definition("bicycle", "http://bobs-bikes.com/") + * inventory.add_child("Michelin model XGV, size 75R") + * doc.to_xml + * # => "\n" + + * # "\n" + + * # " \n" + + * # " Michelin model XGV, size 75R\n" + + * # " \n" + + * # "\n" + * + * *Example:* adding a default namespace definition + * + * doc = Nokogiri::XML("Michelin model XGV, size 75R") + * doc.at_css("tire").add_namespace_definition(nil, "http://bobs-bikes.com/") + * doc.to_xml + * # => "\n" + + * # "\n" + + * # " \n" + + * # " Michelin model XGV, size 75R\n" + + * # " \n" + + * # "\n" + * + */ +static VALUE +rb_xml_node_add_namespace_definition(VALUE rb_node, VALUE rb_prefix, VALUE rb_href) +{ + xmlNodePtr c_node, element; + xmlNsPtr c_namespace; + const xmlChar *c_prefix = (const xmlChar *)(NIL_P(rb_prefix) ? NULL : StringValueCStr(rb_prefix)); + + Noko_Node_Get_Struct(rb_node, xmlNode, c_node); + element = c_node ; + + c_namespace = xmlSearchNs(c_node->doc, c_node, c_prefix); + + if (!c_namespace) { + if (c_node->type != XML_ELEMENT_NODE) { + element = c_node->parent; + } + c_namespace = xmlNewNs(element, (const xmlChar *)StringValueCStr(rb_href), c_prefix); + } + + if (!c_namespace) { + return Qnil ; + } + + if (NIL_P(rb_prefix) || c_node != element) { + xmlSetNs(c_node, c_namespace); + } + + return noko_xml_namespace_wrap(c_namespace, c_node->doc); +} + + +/* + * :call-seq: attribute(name) → Nokogiri::XML::Attr + * + * :category: Working With Node Attributes + * + * [Returns] Attribute (Nokogiri::XML::Attr) belonging to this node with name +name+. + * + * ⚠ Note that attribute namespaces are ignored and only the simple (non-namespace-prefixed) name is + * used to find a matching attribute. 
In case of a simple name collision, only one of the matching + * attributes will be returned. In this case, you will need to use #attribute_with_ns. + * + * *Example:* + * + * doc = Nokogiri::XML("") + * child = doc.at_css("child") + * child.attribute("size") # => # + * child.attribute("class") # => # + * + * *Example* showing that namespaced attributes will not be returned: + * + * ⚠ Note that only one of the two matching attributes is returned. + * + * doc = Nokogiri::XML(<<~EOF) + * + * + * + * EOF + * doc.at_css("child").attribute("size") + * # => #(Attr:0x550 { + * # name = "size", + * # namespace = #(Namespace:0x564 { + * # prefix = "width", + * # href = "http://example.com/widths" + * # }), + * # value = "broad" + * # }) + */ +static VALUE +rb_xml_node_attribute(VALUE self, VALUE name) +{ + xmlNodePtr node; + xmlAttrPtr prop; + Noko_Node_Get_Struct(self, xmlNode, node); + prop = xmlHasProp(node, (xmlChar *)StringValueCStr(name)); + + if (! prop) { return Qnil; } + return noko_xml_node_wrap(Qnil, (xmlNodePtr)prop); +} + + +/* + * :call-seq: attribute_nodes() → Array + * + * :category: Working With Node Attributes + * + * [Returns] Attributes (an Array of Nokogiri::XML::Attr) belonging to this node. + * + * Note that this is the preferred alternative to #attributes when the simple + * (non-namespace-prefixed) attribute names may collide. + * + * *Example:* + * + * Contrast this with the colliding-name example from #attributes. + * + * doc = Nokogiri::XML(<<~EOF) + * + * + * + * EOF + * doc.at_css("child").attribute_nodes + * # => [#(Attr:0x550 { + * # name = "size", + * # namespace = #(Namespace:0x564 { + * # prefix = "width", + * # href = "http://example.com/widths" + * # }), + * # value = "broad" + * # }), + * # #(Attr:0x578 { + * # name = "size", + * # namespace = #(Namespace:0x58c { + * # prefix = "height", + * # href = "http://example.com/heights" + * # }), + * # value = "tall" + * # })] + */ +static VALUE +rb_xml_node_attribute_nodes(VALUE rb_node) +{ + xmlNodePtr c_node; + + Noko_Node_Get_Struct(rb_node, xmlNode, c_node); + + return noko_xml_node_attrs(c_node); +} + + +/* + * :call-seq: attribute_with_ns(name, namespace) → Nokogiri::XML::Attr + * + * :category: Working With Node Attributes + * + * [Returns] + * Attribute (Nokogiri::XML::Attr) belonging to this node with matching +name+ and +namespace+. + * + * [Parameters] + * - +name+ (String): the simple (non-namespace-prefixed) name of the attribute + * - +namespace+ (String): the URI of the attribute's namespace + * + * See related: #attribute + * + * *Example:* + * + * doc = Nokogiri::XML(<<~EOF) + * + * + * + * EOF + * doc.at_css("child").attribute_with_ns("size", "http://example.com/widths") + * # => #(Attr:0x550 { + * # name = "size", + * # namespace = #(Namespace:0x564 { + * # prefix = "width", + * # href = "http://example.com/widths" + * # }), + * # value = "broad" + * # }) + * doc.at_css("child").attribute_with_ns("size", "http://example.com/heights") + * # => #(Attr:0x578 { + * # name = "size", + * # namespace = #(Namespace:0x58c { + * # prefix = "height", + * # href = "http://example.com/heights" + * # }), + * # value = "tall" + * # }) + */ +static VALUE +rb_xml_node_attribute_with_ns(VALUE self, VALUE name, VALUE namespace) +{ + xmlNodePtr node; + xmlAttrPtr prop; + Noko_Node_Get_Struct(self, xmlNode, node); + prop = xmlHasNsProp(node, (xmlChar *)StringValueCStr(name), + NIL_P(namespace) ? NULL : (xmlChar *)StringValueCStr(namespace)); + + if (! 
prop) { return Qnil; } + return noko_xml_node_wrap(Qnil, (xmlNodePtr)prop); +} + + + +/* + * call-seq: blank? → Boolean + * + * [Returns] +true+ if the node is an empty or whitespace-only text or cdata node, else +false+. + * + * *Example:* + * + * Nokogiri("").root.child.blank? # => false + * Nokogiri("\t \n").root.child.blank? # => true + * Nokogiri("").root.child.blank? # => true + * Nokogiri("not-blank").root.child + * .tap { |n| n.content = "" }.blank # => true + */ +static VALUE +rb_xml_node_blank_eh(VALUE self) +{ + xmlNodePtr node; + Noko_Node_Get_Struct(self, xmlNode, node); + return (1 == xmlIsBlankNode(node)) ? Qtrue : Qfalse ; +} + + +/* + * :call-seq: child() → Nokogiri::XML::Node + * + * :category: Traversing Document Structure + * + * [Returns] First of this node's children, or +nil+ if there are no children + * + * This is a convenience method and is equivalent to: + * + * node.children.first + * + * See related: #children + */ +static VALUE +rb_xml_node_child(VALUE self) +{ + xmlNodePtr node, child; + Noko_Node_Get_Struct(self, xmlNode, node); + + child = node->children; + if (!child) { return Qnil; } + + return noko_xml_node_wrap(Qnil, child); +} + + +/* + * :call-seq: children() → Nokogiri::XML::NodeSet + * + * :category: Traversing Document Structure + * + * [Returns] Nokogiri::XML::NodeSet containing this node's children. + */ +static VALUE +rb_xml_node_children(VALUE self) +{ + xmlNodePtr node; + xmlNodePtr child; + xmlNodeSetPtr set; + VALUE document; + VALUE node_set; + + Noko_Node_Get_Struct(self, xmlNode, node); + + child = node->children; + set = xmlXPathNodeSetCreate(child); + + document = DOC_RUBY_OBJECT(node->doc); + + if (!child) { return noko_xml_node_set_wrap(set, document); } + + child = child->next; + while (NULL != child) { + xmlXPathNodeSetAddUnique(set, child); + child = child->next; + } + + node_set = noko_xml_node_set_wrap(set, document); + + return node_set; +} + + +/* + * :call-seq: + * content() → String + * inner_text() → String + * text() → String + * to_str() → String + * + * [Returns] + * Contents of all the text nodes in this node's subtree, concatenated together into a single + * String. + * + * ⚠ Note that entities will _always_ be expanded in the returned String. + * + * See related: #inner_html + * + * *Example* of how entities are handled: + * + * Note that < becomes < in the returned String. + * + * doc = Nokogiri::XML.fragment("a < b") + * doc.at_css("child").content + * # => "a < b" + * + * *Example* of how a subtree is handled: + * + * Note that the tags are omitted and only the text node contents are returned, + * concatenated into a single string. + * + * doc = Nokogiri::XML.fragment("first second") + * doc.at_css("child").content + * # => "first second" + */ +static VALUE +rb_xml_node_content(VALUE self) +{ + xmlNodePtr node; + xmlChar *content; + + Noko_Node_Get_Struct(self, xmlNode, node); + + content = xmlNodeGetContent(node); + if (content) { + VALUE rval = NOKOGIRI_STR_NEW2(content); + xmlFree(content); + return rval; + } + return Qnil; +} + + +/* + * :call-seq: document() → Nokogiri::XML::Document + * + * :category: Traversing Document Structure + * + * [Returns] Parent Nokogiri::XML::Document for this node + */ +static VALUE +rb_xml_node_document(VALUE self) +{ + xmlNodePtr node; + Noko_Node_Get_Struct(self, xmlNode, node); + return DOC_RUBY_OBJECT(node->doc); +} + +/* + * :call-seq: pointer_id() → Integer + * + * [Returns] + * A unique id for this node based on the internal memory structures. 
This method is used by #== + * to determine node identity. + */ +static VALUE +rb_xml_node_pointer_id(VALUE self) +{ + xmlNodePtr node; + Noko_Node_Get_Struct(self, xmlNode, node); + + return rb_uint2inum((uintptr_t)(node)); +} + +/* + * :call-seq: encode_special_chars(string) → String + * + * Encode any special characters in +string+ + */ +static VALUE +encode_special_chars(VALUE self, VALUE string) +{ + xmlNodePtr node; + xmlChar *encoded; + VALUE encoded_str; + + Noko_Node_Get_Struct(self, xmlNode, node); + encoded = xmlEncodeSpecialChars( + node->doc, + (const xmlChar *)StringValueCStr(string) + ); + + encoded_str = NOKOGIRI_STR_NEW2(encoded); + xmlFree(encoded); + + return encoded_str; +} + +/* + * :call-seq: + * create_internal_subset(name, external_id, system_id) + * + * Create the internal subset of a document. + * + * doc.create_internal_subset("chapter", "-//OASIS//DTD DocBook XML//EN", "chapter.dtd") + * # => + * + * doc.create_internal_subset("chapter", nil, "chapter.dtd") + * # => + */ +static VALUE +create_internal_subset(VALUE self, VALUE name, VALUE external_id, VALUE system_id) +{ + xmlNodePtr node; + xmlDocPtr doc; + xmlDtdPtr dtd; + + Noko_Node_Get_Struct(self, xmlNode, node); + + doc = node->doc; + + if (xmlGetIntSubset(doc)) { + rb_raise(rb_eRuntimeError, "Document already has an internal subset"); + } + + dtd = xmlCreateIntSubset( + doc, + NIL_P(name) ? NULL : (const xmlChar *)StringValueCStr(name), + NIL_P(external_id) ? NULL : (const xmlChar *)StringValueCStr(external_id), + NIL_P(system_id) ? NULL : (const xmlChar *)StringValueCStr(system_id) + ); + + if (!dtd) { return Qnil; } + + return noko_xml_node_wrap(Qnil, (xmlNodePtr)dtd); +} + +/* + * :call-seq: + * create_external_subset(name, external_id, system_id) + * + * Create an external subset + */ +static VALUE +create_external_subset(VALUE self, VALUE name, VALUE external_id, VALUE system_id) +{ + xmlNodePtr node; + xmlDocPtr doc; + xmlDtdPtr dtd; + + Noko_Node_Get_Struct(self, xmlNode, node); + + doc = node->doc; + + if (doc->extSubset) { + rb_raise(rb_eRuntimeError, "Document already has an external subset"); + } + + dtd = xmlNewDtd( + doc, + NIL_P(name) ? NULL : (const xmlChar *)StringValueCStr(name), + NIL_P(external_id) ? NULL : (const xmlChar *)StringValueCStr(external_id), + NIL_P(system_id) ? 
NULL : (const xmlChar *)StringValueCStr(system_id) + ); + + if (!dtd) { return Qnil; } + + return noko_xml_node_wrap(Qnil, (xmlNodePtr)dtd); +} + +/* + * :call-seq: + * external_subset() + * + * Get the external subset + */ +static VALUE +external_subset(VALUE self) +{ + xmlNodePtr node; + xmlDocPtr doc; + xmlDtdPtr dtd; + + Noko_Node_Get_Struct(self, xmlNode, node); + + if (!node->doc) { return Qnil; } + + doc = node->doc; + dtd = doc->extSubset; + + if (!dtd) { return Qnil; } + + return noko_xml_node_wrap(Qnil, (xmlNodePtr)dtd); +} + +/* + * :call-seq: + * internal_subset() + * + * Get the internal subset + */ +static VALUE +internal_subset(VALUE self) +{ + xmlNodePtr node; + xmlDocPtr doc; + xmlDtdPtr dtd; + + Noko_Node_Get_Struct(self, xmlNode, node); + + if (!node->doc) { return Qnil; } + + doc = node->doc; + dtd = xmlGetIntSubset(doc); + + if (!dtd) { return Qnil; } + + return noko_xml_node_wrap(Qnil, (xmlNodePtr)dtd); +} + +/* :nodoc: */ +static VALUE +rb_xml_node_initialize_copy_with_args(VALUE rb_self, VALUE rb_other, VALUE rb_level, VALUE rb_new_parent_doc) +{ + xmlNodePtr c_self, c_other; + int c_level; + xmlDocPtr c_new_parent_doc; + VALUE rb_node_cache; + + Noko_Node_Get_Struct(rb_other, xmlNode, c_other); + c_level = (int)NUM2INT(rb_level); + c_new_parent_doc = noko_xml_document_unwrap(rb_new_parent_doc); + + c_self = xmlDocCopyNode(c_other, c_new_parent_doc, c_level); + if (c_self == NULL) { return Qnil; } + + _xml_node_data_ptr_set(rb_self, c_self); + noko_xml_document_pin_node(c_self); + + rb_node_cache = DOC_NODE_CACHE(c_new_parent_doc); + rb_ary_push(rb_node_cache, rb_self); + rb_funcall(rb_new_parent_doc, id_decorate, 1, rb_self); + + return rb_self; +} + +/* + * :call-seq: + * unlink() → self + * + * Unlink this node from its current context. + */ +static VALUE +unlink_node(VALUE self) +{ + xmlNodePtr node; + Noko_Node_Get_Struct(self, xmlNode, node); + xmlUnlinkNode(node); + noko_xml_document_pin_node(node); + return self; +} + + +/* + * call-seq: + * next_sibling + * + * Returns the next sibling node + */ +static VALUE +next_sibling(VALUE self) +{ + xmlNodePtr node, sibling; + Noko_Node_Get_Struct(self, xmlNode, node); + + sibling = node->next; + if (!sibling) { return Qnil; } + + return noko_xml_node_wrap(Qnil, sibling) ; +} + +/* + * call-seq: + * previous_sibling + * + * Returns the previous sibling node + */ +static VALUE +previous_sibling(VALUE self) +{ + xmlNodePtr node, sibling; + Noko_Node_Get_Struct(self, xmlNode, node); + + sibling = node->prev; + if (!sibling) { return Qnil; } + + return noko_xml_node_wrap(Qnil, sibling); +} + +/* + * call-seq: + * next_element + * + * Returns the next Nokogiri::XML::Element type sibling node. + */ +static VALUE +next_element(VALUE self) +{ + xmlNodePtr node, sibling; + Noko_Node_Get_Struct(self, xmlNode, node); + + sibling = xmlNextElementSibling(node); + if (!sibling) { return Qnil; } + + return noko_xml_node_wrap(Qnil, sibling); +} + +/* + * call-seq: + * previous_element + * + * Returns the previous Nokogiri::XML::Element type sibling node. 
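+ *
+ * A short navigation sketch (element names are illustrative):
+ *
+ *   doc = Nokogiri::XML("<root>text<a/>more<b/></root>")
+ *   b = doc.at_css("b")
+ *   b.previous_sibling.name  # => "text" (the adjacent text node)
+ *   b.previous_element.name  # => "a"    (only element siblings count; text nodes are skipped)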
+ */ +static VALUE +previous_element(VALUE self) +{ + xmlNodePtr node, sibling; + Noko_Node_Get_Struct(self, xmlNode, node); + + sibling = xmlPreviousElementSibling(node); + if (!sibling) { return Qnil; } + + return noko_xml_node_wrap(Qnil, sibling); +} + +/* :nodoc: */ +static VALUE +replace(VALUE self, VALUE new_node) +{ + VALUE reparent = reparent_node_with(self, new_node, xmlReplaceNodeWrapper); + + xmlNodePtr pivot; + Noko_Node_Get_Struct(self, xmlNode, pivot); + noko_xml_document_pin_node(pivot); + + return reparent; +} + +/* + * :call-seq: + * element_children() → NodeSet + * elements() → NodeSet + * + * [Returns] + * The node's child elements as a NodeSet. Only children that are elements will be returned, which + * notably excludes Text nodes. + * + * *Example:* + * + * Note that #children returns the Text node "hello" while #element_children does not. + * + * div = Nokogiri::HTML5("
helloworld").at_css("div") + * div.element_children + * # => [#]>] + * div.children + * # => [#, + * # #]>] + */ +static VALUE +rb_xml_node_element_children(VALUE self) +{ + xmlNodePtr node; + xmlNodePtr child; + xmlNodeSetPtr set; + VALUE document; + VALUE node_set; + + Noko_Node_Get_Struct(self, xmlNode, node); + + child = xmlFirstElementChild(node); + set = xmlXPathNodeSetCreate(child); + + document = DOC_RUBY_OBJECT(node->doc); + + if (!child) { return noko_xml_node_set_wrap(set, document); } + + child = xmlNextElementSibling(child); + while (NULL != child) { + xmlXPathNodeSetAddUnique(set, child); + child = xmlNextElementSibling(child); + } + + node_set = noko_xml_node_set_wrap(set, document); + + return node_set; +} + +/* + * :call-seq: + * first_element_child() → Node + * + * [Returns] The first child Node that is an element. + * + * *Example:* + * + * Note that the "hello" child, which is a Text node, is skipped and the element is + * returned. + * + * div = Nokogiri::HTML5("
helloworld").at_css("div") + * div.first_element_child + * # => #(Element:0x3c { name = "span", children = [ #(Text "world")] }) + */ +static VALUE +rb_xml_node_first_element_child(VALUE self) +{ + xmlNodePtr node, child; + Noko_Node_Get_Struct(self, xmlNode, node); + + child = xmlFirstElementChild(node); + if (!child) { return Qnil; } + + return noko_xml_node_wrap(Qnil, child); +} + +/* + * :call-seq: + * last_element_child() → Node + * + * [Returns] The last child Node that is an element. + * + * *Example:* + * + * Note that the "hello" child, which is a Text node, is skipped and the yes + * element is returned. + * + * div = Nokogiri::HTML5("
noyesskip
").at_css("div") + * div.last_element_child + * # => #(Element:0x3c { name = "span", children = [ #(Text "yes")] }) + */ +static VALUE +rb_xml_node_last_element_child(VALUE self) +{ + xmlNodePtr node, child; + Noko_Node_Get_Struct(self, xmlNode, node); + + child = xmlLastElementChild(node); + if (!child) { return Qnil; } + + return noko_xml_node_wrap(Qnil, child); +} + +/* + * call-seq: + * key?(attribute) + * + * Returns true if +attribute+ is set + */ +static VALUE +key_eh(VALUE self, VALUE attribute) +{ + xmlNodePtr node; + Noko_Node_Get_Struct(self, xmlNode, node); + if (xmlHasProp(node, (xmlChar *)StringValueCStr(attribute))) { + return Qtrue; + } + return Qfalse; +} + +/* + * call-seq: + * namespaced_key?(attribute, namespace) + * + * Returns true if +attribute+ is set with +namespace+ + */ +static VALUE +namespaced_key_eh(VALUE self, VALUE attribute, VALUE namespace) +{ + xmlNodePtr node; + Noko_Node_Get_Struct(self, xmlNode, node); + if (xmlHasNsProp(node, (xmlChar *)StringValueCStr(attribute), + NIL_P(namespace) ? NULL : (xmlChar *)StringValueCStr(namespace))) { + return Qtrue; + } + return Qfalse; +} + +/* + * call-seq: + * []=(property, value) + * + * Set the +property+ to +value+ + */ +static VALUE +set(VALUE self, VALUE property, VALUE value) +{ + xmlNodePtr node, cur; + xmlAttrPtr prop; + Noko_Node_Get_Struct(self, xmlNode, node); + + /* If a matching attribute node already exists, then xmlSetProp will destroy + * the existing node's children. However, if Nokogiri has a node object + * pointing to one of those children, we are left with a broken reference. + * + * We can avoid this by unlinking these nodes first. + */ + if (node->type != XML_ELEMENT_NODE) { + return (Qnil); + } + prop = xmlHasProp(node, (xmlChar *)StringValueCStr(property)); + if (prop && prop->children) { + for (cur = prop->children; cur; cur = cur->next) { + if (cur->_private) { + noko_xml_document_pin_node(cur); + xmlUnlinkNode(cur); + } + } + } + + xmlSetProp(node, (xmlChar *)StringValueCStr(property), + (xmlChar *)StringValueCStr(value)); + + return value; +} + +/* + * call-seq: + * get(attribute) + * + * Get the value for +attribute+ + */ +static VALUE +get(VALUE self, VALUE rattribute) +{ + xmlNodePtr node; + xmlChar *value = 0; + VALUE rvalue; + xmlChar *colon; + xmlChar *attribute, *attr_name, *prefix; + xmlNsPtr ns; + + if (NIL_P(rattribute)) { return Qnil; } + + Noko_Node_Get_Struct(self, xmlNode, node); + attribute = xmlCharStrdup(StringValueCStr(rattribute)); + + colon = DISCARD_CONST_QUAL_XMLCHAR(xmlStrchr(attribute, (const xmlChar)':')); + if (colon) { + /* split the attribute string into separate prefix and name by + * null-terminating the prefix at the colon */ + prefix = attribute; + attr_name = colon + 1; + (*colon) = 0; + + ns = xmlSearchNs(node->doc, node, prefix); + if (ns) { + value = xmlGetNsProp(node, attr_name, ns->href); + } else { + value = xmlGetProp(node, (xmlChar *)StringValueCStr(rattribute)); + } + } else { + value = xmlGetNoNsProp(node, attribute); + } + + xmlFree((void *)attribute); + if (!value) { return Qnil; } + + rvalue = NOKOGIRI_STR_NEW2(value); + xmlFree((void *)value); + + return rvalue ; +} + +/* + * call-seq: + * set_namespace(namespace) + * + * Set the namespace to +namespace+ + */ +static VALUE +set_namespace(VALUE self, VALUE namespace) +{ + xmlNodePtr node; + xmlNsPtr ns = NULL; + + Noko_Node_Get_Struct(self, xmlNode, node); + + if (!NIL_P(namespace)) { + Noko_Namespace_Get_Struct(namespace, xmlNs, ns); + } + + xmlSetNs(node, ns); + + return self; +} + +/* + * 
:call-seq: + * namespace() → Namespace + * + * [Returns] The Namespace of the element or attribute node, or +nil+ if there is no namespace. + * + * *Example:* + * + * doc = Nokogiri::XML(<<~EOF) + * + * + * + * + * + * EOF + * doc.at_xpath("//first").namespace + * # => nil + * doc.at_xpath("//xmlns:second", "xmlns" => "http://example.com/child").namespace + * # => #(Namespace:0x3c { href = "http://example.com/child" }) + * doc.at_xpath("//foo:third", "foo" => "http://example.com/foo").namespace + * # => #(Namespace:0x50 { prefix = "foo", href = "http://example.com/foo" }) + */ +static VALUE +rb_xml_node_namespace(VALUE rb_node) +{ + xmlNodePtr c_node ; + Noko_Node_Get_Struct(rb_node, xmlNode, c_node); + + if (c_node->ns) { + return noko_xml_namespace_wrap(c_node->ns, c_node->doc); + } + + return Qnil ; +} + +/* + * :call-seq: + * namespace_definitions() → Array + * + * [Returns] + * Namespaces that are defined directly on this node, as an Array of Namespace objects. The array + * will be empty if no namespaces are defined on this node. + * + * *Example:* + * + * doc = Nokogiri::XML(<<~EOF) + * + * + * + * + * + * EOF + * doc.at_xpath("//root:first", "root" => "http://example.com/root").namespace_definitions + * # => [] + * doc.at_xpath("//xmlns:second", "xmlns" => "http://example.com/child").namespace_definitions + * # => [#(Namespace:0x3c { href = "http://example.com/child" }), + * # #(Namespace:0x50 { + * # prefix = "unused", + * # href = "http://example.com/unused" + * # })] + * doc.at_xpath("//foo:third", "foo" => "http://example.com/foo").namespace_definitions + * # => [#(Namespace:0x64 { prefix = "foo", href = "http://example.com/foo" })] + */ +static VALUE +namespace_definitions(VALUE rb_node) +{ + /* this code in the mode of xmlHasProp() */ + xmlNodePtr c_node ; + xmlNsPtr c_namespace; + VALUE definitions = rb_ary_new(); + + Noko_Node_Get_Struct(rb_node, xmlNode, c_node); + + c_namespace = c_node->nsDef; + if (!c_namespace) { + return definitions; + } + + while (c_namespace != NULL) { + rb_ary_push(definitions, noko_xml_namespace_wrap(c_namespace, c_node->doc)); + c_namespace = c_namespace->next; + } + + return definitions; +} + +/* + * :call-seq: + * namespace_scopes() → Array + * + * [Returns] Array of all the Namespaces on this node and its ancestors. 
+ * + * See also #namespaces + * + * *Example:* + * + * doc = Nokogiri::XML(<<~EOF) + * + * + * + * + * + * EOF + * doc.at_xpath("//root:first", "root" => "http://example.com/root").namespace_scopes + * # => [#(Namespace:0x3c { href = "http://example.com/root" }), + * # #(Namespace:0x50 { prefix = "bar", href = "http://example.com/bar" })] + * doc.at_xpath("//child:second", "child" => "http://example.com/child").namespace_scopes + * # => [#(Namespace:0x64 { href = "http://example.com/child" }), + * # #(Namespace:0x50 { prefix = "bar", href = "http://example.com/bar" })] + * doc.at_xpath("//root:third", "root" => "http://example.com/root").namespace_scopes + * # => [#(Namespace:0x78 { prefix = "foo", href = "http://example.com/foo" }), + * # #(Namespace:0x3c { href = "http://example.com/root" }), + * # #(Namespace:0x50 { prefix = "bar", href = "http://example.com/bar" })] + */ +static VALUE +rb_xml_node_namespace_scopes(VALUE rb_node) +{ + xmlNodePtr c_node ; + xmlNsPtr *namespaces; + VALUE scopes = rb_ary_new(); + int j; + + Noko_Node_Get_Struct(rb_node, xmlNode, c_node); + + namespaces = xmlGetNsList(c_node->doc, c_node); + if (!namespaces) { + return scopes; + } + + for (j = 0 ; namespaces[j] != NULL ; ++j) { + rb_ary_push(scopes, noko_xml_namespace_wrap(namespaces[j], c_node->doc)); + } + + xmlFree(namespaces); + return scopes; +} + +/* + * call-seq: + * node_type + * + * Get the type for this Node + */ +static VALUE +node_type(VALUE self) +{ + xmlNodePtr node; + Noko_Node_Get_Struct(self, xmlNode, node); + return INT2NUM(node->type); +} + +/* + * call-seq: + * native_content=(input) + * + * Set the content of this node to +input+. + * + * [Parameters] + * - +input+ (String) The new content for this node. + * + * ⚠ This method behaves differently depending on the node type. For Text, CDATA, Comment, and + * ProcessingInstruction nodes, it treats the input as raw content, which means that the final DOM + * will contain the entity-escaped version of the input (see example below). For Element and Attr + * nodes, it treats the input as parsed content and expects it to be valid markup that is already + * entity-escaped. + * + * 💡 Use Node#content= for a more consistent API across node types. + * + * [Example] + * Note the behavior differences of this method between Text and Element nodes: + * + * doc = Nokogiri::HTML::Document.parse(<<~HTML) + * + * + *
+ *       <div id="first">asdf</div>
+ *       <div id="second">asdf</div>
+ * HTML + * + * text_node = doc.at_css("div#first").children.first + * div_node = doc.at_css("div#second") + * + * value = "You & Me" + * + * text_node.native_content = value + * div_node.native_content = value + * + * doc.css("div").to_html + * # => "
+ *   #      <div id=\"first\">You &amp; Me</div>
+ *   #      <div id=\"second\">You & Me</div>
" + * + * See also: #content= + */ +static VALUE +set_native_content(VALUE self, VALUE content) +{ + xmlNodePtr node, child, next ; + Noko_Node_Get_Struct(self, xmlNode, node); + + child = node->children; + while (NULL != child) { + next = child->next ; + xmlUnlinkNode(child) ; + noko_xml_document_pin_node(child); + child = next ; + } + + xmlNodeSetContent(node, (xmlChar *)StringValueCStr(content)); + return content; +} + +/* + * call-seq: + * lang= + * + * Set the language of a node, i.e. the values of the xml:lang attribute. + */ +static VALUE +set_lang(VALUE self_rb, VALUE lang_rb) +{ + xmlNodePtr self ; + xmlChar *lang ; + + Noko_Node_Get_Struct(self_rb, xmlNode, self); + lang = (xmlChar *)StringValueCStr(lang_rb); + + xmlNodeSetLang(self, lang); + + return Qnil ; +} + +/* + * call-seq: + * lang + * + * Searches the language of a node, i.e. the values of the xml:lang attribute or + * the one carried by the nearest ancestor. + */ +static VALUE +get_lang(VALUE self_rb) +{ + xmlNodePtr self ; + xmlChar *lang ; + VALUE lang_rb ; + + Noko_Node_Get_Struct(self_rb, xmlNode, self); + + lang = xmlNodeGetLang(self); + if (lang) { + lang_rb = NOKOGIRI_STR_NEW2(lang); + xmlFree(lang); + return lang_rb ; + } + + return Qnil ; +} + +/* :nodoc: */ +static VALUE +add_child(VALUE self, VALUE new_child) +{ + return reparent_node_with(self, new_child, xmlAddChild); +} + +/* + * call-seq: + * parent + * + * Get the parent Node for this Node + */ +static VALUE +get_parent(VALUE self) +{ + xmlNodePtr node, parent; + Noko_Node_Get_Struct(self, xmlNode, node); + + parent = node->parent; + if (!parent) { return Qnil; } + + return noko_xml_node_wrap(Qnil, parent) ; +} + +/* + * call-seq: + * name=(new_name) + * + * Set the name for this Node + */ +static VALUE +set_name(VALUE self, VALUE new_name) +{ + xmlNodePtr node; + Noko_Node_Get_Struct(self, xmlNode, node); + xmlNodeSetName(node, (xmlChar *)StringValueCStr(new_name)); + return new_name; +} + +/* + * call-seq: + * name + * + * Returns the name for this Node + */ +static VALUE +get_name(VALUE self) +{ + xmlNodePtr node; + Noko_Node_Get_Struct(self, xmlNode, node); + if (node->name) { + return NOKOGIRI_STR_NEW2(node->name); + } + return Qnil; +} + +/* + * call-seq: + * path + * + * Returns the path associated with this Node + */ +static VALUE +rb_xml_node_path(VALUE rb_node) +{ + xmlNodePtr c_node; + xmlChar *c_path ; + VALUE rval; + + Noko_Node_Get_Struct(rb_node, xmlNode, c_node); + + c_path = xmlGetNodePath(c_node); + if (c_path == NULL) { + // see https://github.com/sparklemotion/nokogiri/issues/2250 + // this behavior is clearly undesirable, but is what libxml <= 2.9.10 returned, and so we + // do this for now to preserve the behavior across libxml2 versions. 
+ rval = NOKOGIRI_STR_NEW2("?"); + } else { + rval = NOKOGIRI_STR_NEW2(c_path); + xmlFree(c_path); + } + + return rval ; +} + +/* :nodoc: */ +static VALUE +add_next_sibling(VALUE self, VALUE new_sibling) +{ + return reparent_node_with(self, new_sibling, xmlAddNextSibling) ; +} + +/* :nodoc: */ +static VALUE +add_previous_sibling(VALUE self, VALUE new_sibling) +{ + return reparent_node_with(self, new_sibling, xmlAddPrevSibling) ; +} + +/* + * call-seq: + * native_write_to(io, encoding, options) + * + * Write this Node to +io+ with +encoding+ and +options+ + */ +static VALUE +native_write_to( + VALUE self, + VALUE io, + VALUE encoding, + VALUE indent_string, + VALUE options +) +{ + xmlNodePtr node; + const char *before_indent; + xmlSaveCtxtPtr savectx; + + Noko_Node_Get_Struct(self, xmlNode, node); + + xmlIndentTreeOutput = 1; + + before_indent = xmlTreeIndentString; + + xmlTreeIndentString = StringValueCStr(indent_string); + + savectx = xmlSaveToIO( + (xmlOutputWriteCallback)noko_io_write, + (xmlOutputCloseCallback)noko_io_close, + (void *)io, + RTEST(encoding) ? StringValueCStr(encoding) : NULL, + (int)NUM2INT(options) + ); + + xmlSaveTree(savectx, node); + xmlSaveClose(savectx); + + xmlTreeIndentString = before_indent; + return io; +} + + +static inline void +output_partial_string(VALUE out, char const *str, size_t length) +{ + if (length) { + rb_enc_str_buf_cat(out, str, (long)length, rb_utf8_encoding()); + } +} + +static inline void +output_char(VALUE out, char ch) +{ + output_partial_string(out, &ch, 1); +} + +static inline void +output_string(VALUE out, char const *str) +{ + output_partial_string(out, str, strlen(str)); +} + +static inline void +output_tagname(VALUE out, xmlNodePtr elem) +{ + // Elements in the HTML, MathML, and SVG namespaces do not use a namespace + // prefix in the HTML syntax. 
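+  //
+  // For example (illustrative): an element in the SVG namespace is serialized
+  // as "rect", while an element bound to some foreign namespace with prefix "x"
+  // is serialized as "x:rect".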
+  char const *name = (char const *)elem->name;
+  xmlNsPtr ns = elem->ns;
+  if (ns && ns->href && ns->prefix
+      && strcmp((char const *)ns->href, "http://www.w3.org/1999/xhtml")
+      && strcmp((char const *)ns->href, "http://www.w3.org/1998/Math/MathML")
+      && strcmp((char const *)ns->href, "http://www.w3.org/2000/svg")) {
+    output_string(out, (char const *)elem->ns->prefix);
+    output_char(out, ':');
+    char const *colon = strchr(name, ':');
+    if (colon) {
+      name = colon + 1;
+    }
+  }
+  output_string(out, name);
+}
+
+static inline void
+output_attr_name(VALUE out, xmlAttrPtr attr)
+{
+  xmlNsPtr ns = attr->ns;
+  char const *name = (char const *)attr->name;
+  if (ns && ns->href) {
+    char const *uri = (char const *)ns->href;
+    char const *localname = strchr(name, ':');
+    if (localname) {
+      ++localname;
+    } else {
+      localname = name;
+    }
+
+    if (!strcmp(uri, "http://www.w3.org/XML/1998/namespace")) {
+      output_string(out, "xml:");
+      name = localname;
+    } else if (!strcmp(uri, "http://www.w3.org/2000/xmlns/")) {
+      // xmlns:xmlns -> xmlns
+      // xmlns:foo -> xmlns:foo
+      if (strcmp(localname, "xmlns")) {
+        output_string(out, "xmlns:");
+      }
+      name = localname;
+    } else if (!strcmp(uri, "http://www.w3.org/1999/xlink")) {
+      output_string(out, "xlink:");
+      name = localname;
+    } else if (ns->prefix) {
+      output_string(out, (char const *)ns->prefix);
+      output_char(out, ':');
+      name = localname;
+    }
+  }
+  output_string(out, name);
+}
+
+static void
+output_escaped_string(VALUE out, xmlChar const *start, bool attr)
+{
+  xmlChar const *next = start;
+  int ch;
+
+  while ((ch = *next) != 0) {
+    char const *replacement = NULL;
+    size_t replaced_bytes = 1;
+    if (ch == '&') {
+      replacement = "&amp;";
+    } else if (ch == 0xC2 && next[1] == 0xA0) {
+      // U+00A0 NO-BREAK SPACE has the UTF-8 encoding C2 A0.
+      replacement = "&nbsp;";
+      replaced_bytes = 2;
+    } else if (attr && ch == '"') {
+      replacement = "&quot;";
+    } else if (!attr && ch == '<') {
+      replacement = "&lt;";
+    } else if (!attr && ch == '>') {
+      replacement = "&gt;";
+    } else {
+      ++next;
+      continue;
+    }
+    output_partial_string(out, (char const *)start, (size_t)(next - start));
+    output_string(out, replacement);
+    next += replaced_bytes;
+    start = next;
+  }
+  output_partial_string(out, (char const *)start, (size_t)(next - start));
+}
+
+static bool
+should_prepend_newline(xmlNodePtr node)
+{
+  char const *name = (char const *)node->name;
+  xmlNodePtr child = node->children;
+
+  if (!name || !child || (strcmp(name, "pre") && strcmp(name, "textarea") && strcmp(name, "listing"))) {
+    return false;
+  }
+
+  return child->type == XML_TEXT_NODE && child->content && child->content[0] == '\n';
+}
+
+static VALUE
+rb_prepend_newline(VALUE self)
+{
+  xmlNodePtr node;
+  Noko_Node_Get_Struct(self, xmlNode, node);
+  return should_prepend_newline(node) ? Qtrue : Qfalse;
+}
+
+static bool
+is_one_of(xmlNodePtr node, char const *const *tagnames, size_t num_tagnames)
+{
+  char const *name = (char const *)node->name;
+  if (name == NULL) { // fragments don't have a name
+    return false;
+  }
+
+  if (node->ns != NULL) {
+    // if the node has a namespace, it's in a foreign context and is not one of the HTML tags we're
+    // matching against.
+    return false;
+  }
+
+  for (size_t idx = 0; idx < num_tagnames; ++idx) {
+    if (!strcmp(name, tagnames[idx])) {
+      return true;
+    }
+  }
+  return false;
+}
+
+static void
+output_node(
+  VALUE out,
+  xmlNodePtr node,
+  bool preserve_newline
+)
+{
+  static char const *const VOID_ELEMENTS[] = {
+    "area", "base", "basefont", "bgsound", "br", "col", "embed", "frame", "hr",
+    "img", "input", "keygen", "link", "meta", "param", "source", "track", "wbr",
+  };
+
+  static char const *const UNESCAPED_TEXT_ELEMENTS[] = {
+    "style", "script", "xmp", "iframe", "noembed", "noframes", "plaintext", "noscript",
+  };
+
+  switch (node->type) {
+    case XML_ELEMENT_NODE:
+      // Serialize the start tag.
+      output_char(out, '<');
+      output_tagname(out, node);
+
+      // Add attributes.
+      for (xmlAttrPtr attr = node->properties; attr; attr = attr->next) {
+        output_char(out, ' ');
+        output_node(out, (xmlNodePtr)attr, preserve_newline);
+      }
+      output_char(out, '>');
+
+      // Add children and end tag if element is not void.
+      if (!is_one_of(node, VOID_ELEMENTS, sizeof VOID_ELEMENTS / sizeof VOID_ELEMENTS[0])) {
+        if (preserve_newline && should_prepend_newline(node)) {
+          output_char(out, '\n');
+        }
+        for (xmlNodePtr child = node->children; child; child = child->next) {
+          output_node(out, child, preserve_newline);
+        }
+        output_string(out, "</");
+        output_tagname(out, node);
+        output_char(out, '>');
+      }
+      break;
+
+    case XML_ATTRIBUTE_NODE: {
+      xmlAttrPtr attr = (xmlAttrPtr)node;
+      output_attr_name(out, attr);
+      if (attr->children) {
+        output_string(out, "=\"");
+        xmlChar *value = xmlNodeListGetString(attr->doc, attr->children, 1);
+        output_escaped_string(out, value, true);
+        xmlFree(value);
+        output_char(out, '"');
+      } else {
+        // Output name=""
+        output_string(out, "=\"\"");
+      }
+    }
+    break;
+
+    case XML_TEXT_NODE:
+      if (node->parent
+          && is_one_of(node->parent, UNESCAPED_TEXT_ELEMENTS,
+                       sizeof UNESCAPED_TEXT_ELEMENTS / sizeof UNESCAPED_TEXT_ELEMENTS[0])) {
+        output_string(out, (char const *)node->content);
+      } else {
+        output_escaped_string(out, node->content, false);
+      }
+      break;
+
+    case XML_CDATA_SECTION_NODE:
+      output_string(out, "<![CDATA[");
+      output_string(out, (char const *)node->content);
+      output_string(out, "]]>");
+      break;
+
+    case XML_COMMENT_NODE:
+      output_string(out, "<!--");
+      output_string(out, (char const *)node->content);
+      output_string(out, "-->");
+      break;
+
+    case XML_PI_NODE:
+      output_string(out, "<?");
+      output_string(out, (char const *)node->name);
+      output_char(out, ' ');
+      output_string(out, (char const *)node->content);
+      output_char(out, '>');
+      break;
+
+    case XML_DOCUMENT_TYPE_NODE:
+    case XML_DTD_NODE:
+      output_string(out, "<!DOCTYPE ");
+      output_string(out, (char const *)node->name);
+      output_string(out, ">");
+      break;
+
+    case XML_DOCUMENT_NODE:
+    case XML_DOCUMENT_FRAG_NODE:
+    case XML_HTML_DOCUMENT_NODE:
+      for (xmlNodePtr child = node->children; child; child = child->next) {
+        output_node(out, child, preserve_newline);
+      }
+      break;
+
+    default:
+      rb_raise(rb_eRuntimeError, "Unsupported document node (%d); this is a bug in Nokogiri", node->type);
+      break;
+  }
+}
+
+static VALUE
+html_standard_serialize(
+  VALUE self,
+  VALUE preserve_newline
+)
+{
+  xmlNodePtr node;
+  Noko_Node_Get_Struct(self, xmlNode, node);
+  VALUE output = rb_str_buf_new(4096);
+  output_node(output, node, RTEST(preserve_newline));
+  return output;
+}
+
+/*
+ * :call-seq:
+ *   line() → Integer
+ *
+ * [Returns] The line number of this Node.
+ *
+ * ---
+ *
+ * ⚠ The CRuby and JRuby implementations differ in important ways!
+ * + * Semantic differences: + * - The CRuby method reflects the node's line number in the parsed string + * - The JRuby method reflects the node's line number in the final DOM structure after + * corrections have been applied + * + * Performance differences: + * - The CRuby method is {O(1)}[https://en.wikipedia.org/wiki/Time_complexity#Constant_time] + * (constant time) + * - The JRuby method is {O(n)}[https://en.wikipedia.org/wiki/Time_complexity#Linear_time] (linear + * time, where n is the number of nodes before/above the element in the DOM) + * + * If you'd like to help improve the JRuby implementation, please review these issues and reach out + * to the maintainers: + * - https://github.com/sparklemotion/nokogiri/issues/1223 + * - https://github.com/sparklemotion/nokogiri/pull/2177 + * - https://github.com/sparklemotion/nokogiri/issues/2380 + */ +static VALUE +rb_xml_node_line(VALUE rb_node) +{ + xmlNodePtr c_node; + Noko_Node_Get_Struct(rb_node, xmlNode, c_node); + + return LONG2NUM(xmlGetLineNo(c_node)); +} + +/* + * call-seq: + * line=(num) + * + * Sets the line for this Node. num must be less than 65535. + */ +static VALUE +rb_xml_node_line_set(VALUE rb_node, VALUE rb_line_number) +{ + xmlNodePtr c_node; + int line_number = NUM2INT(rb_line_number); + + Noko_Node_Get_Struct(rb_node, xmlNode, c_node); + + // libxml2 optionally uses xmlNode.psvi to store longer line numbers, but only for text nodes. + // search for "psvi" in SAX2.c and tree.c to learn more. + if (line_number < 65535) { + c_node->line = (short unsigned)line_number; + } else { + c_node->line = 65535; + if (c_node->type == XML_TEXT_NODE) { + c_node->psvi = (void *)(ptrdiff_t)line_number; + } + } + + return rb_line_number; +} + +/* :nodoc: documented in lib/nokogiri/xml/node.rb */ +static VALUE +rb_xml_node_new(int argc, VALUE *argv, VALUE klass) +{ + xmlNodePtr c_document_node; + xmlNodePtr c_node; + VALUE rb_name; + VALUE rb_document_node; + VALUE rest; + VALUE rb_node; + + rb_scan_args(argc, argv, "2*", &rb_name, &rb_document_node, &rest); + + if (!rb_obj_is_kind_of(rb_document_node, cNokogiriXmlNode)) { + rb_raise(rb_eArgError, "document must be a Nokogiri::XML::Node"); + } + if (!rb_obj_is_kind_of(rb_document_node, cNokogiriXmlDocument)) { + NOKO_WARN_DEPRECATION("Passing a Node as the second parameter to Node.new is deprecated. Please pass a Document instead, or prefer an alternative constructor like Node#add_child. This will become an error in Nokogiri v1.17.0."); // TODO: deprecated in v1.13.0, remove in v1.17.0 + } + Noko_Node_Get_Struct(rb_document_node, xmlNode, c_document_node); + + c_node = xmlNewNode(NULL, (xmlChar *)StringValueCStr(rb_name)); + c_node->doc = c_document_node->doc; + noko_xml_document_pin_node(c_node); + + rb_node = noko_xml_node_wrap( + klass == cNokogiriXmlNode ? (VALUE)NULL : klass, + c_node + ); + rb_obj_call_init(rb_node, argc, argv); + + if (rb_block_given_p()) { rb_yield(rb_node); } + + return rb_node; +} + +/* + * call-seq: + * dump_html + * + * Returns the Node as html. 
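+ *
+ * A minimal usage sketch; the markup and the exact output string are
+ * illustrative (this is a private helper, normally reached via Node#to_html):
+ *
+ *   doc  = Nokogiri::HTML::Document.parse("<html><body><p>hi</p></body></html>")
+ *   node = doc.at_css("p")
+ *   node.send(:dump_html) # => "<p>hi</p>"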
+ */ +static VALUE +dump_html(VALUE self) +{ + xmlBufferPtr buf ; + xmlNodePtr node ; + VALUE html; + + Noko_Node_Get_Struct(self, xmlNode, node); + + buf = xmlBufferCreate() ; + htmlNodeDump(buf, node->doc, node); + html = NOKOGIRI_STR_NEW2(xmlBufferContent(buf)); + xmlBufferFree(buf); + return html ; +} + +/* + * call-seq: + * compare(other) + * + * Compare this Node to +other+ with respect to their Document + */ +static VALUE +compare(VALUE self, VALUE _other) +{ + xmlNodePtr node, other; + Noko_Node_Get_Struct(self, xmlNode, node); + Noko_Node_Get_Struct(_other, xmlNode, other); + + return INT2NUM(xmlXPathCmpNodes(other, node)); +} + + +/* + * call-seq: + * process_xincludes(flags) + * + * Loads and substitutes all xinclude elements below the node. The + * parser context will be initialized with +flags+. + */ +static VALUE +noko_xml_node__process_xincludes(VALUE rb_node, VALUE rb_flags) +{ + int status ; + xmlNodePtr c_node; + VALUE rb_errors = rb_ary_new(); + libxmlStructuredErrorHandlerState handler_state; + + Noko_Node_Get_Struct(rb_node, xmlNode, c_node); + + noko__structured_error_func_save_and_set(&handler_state, (void *)rb_errors, noko__error_array_pusher); + + status = xmlXIncludeProcessTreeFlags(c_node, (int)NUM2INT(rb_flags)); + + noko__structured_error_func_restore(&handler_state); + + if (status < 0) { + VALUE exception = rb_funcall(cNokogiriXmlSyntaxError, rb_intern("aggregate"), 1, rb_errors); + + if (RB_TEST(exception)) { + rb_exc_raise(exception); + } else { + rb_raise(rb_eRuntimeError, "Could not perform xinclude substitution"); + } + } + + return rb_node; +} + + +/* TODO: DOCUMENT ME */ +static VALUE +in_context(VALUE self, VALUE _str, VALUE _options) +{ + xmlNodePtr node, list = 0, tmp, child_iter, node_children, doc_children; + xmlNodeSetPtr set; + xmlParserErrors error; + VALUE doc, err; + int doc_is_empty; + + Noko_Node_Get_Struct(self, xmlNode, node); + + doc = DOC_RUBY_OBJECT(node->doc); + err = rb_iv_get(doc, "@errors"); + doc_is_empty = (node->doc->children == NULL) ? 1 : 0; + node_children = node->children; + doc_children = node->doc->children; + + xmlSetStructuredErrorFunc((void *)err, noko__error_array_pusher); + + /* This function adds a fake node to the child of +node+. If the parser + * does not exit cleanly with XML_ERR_OK, the list is freed. This can + * leave the child pointers in a bad state if they were originally empty. + * + * http://git.gnome.org/browse/libxml2/tree/parser.c#n13177 + * */ + error = xmlParseInNodeContext(node, StringValuePtr(_str), + (int)RSTRING_LEN(_str), + (int)NUM2INT(_options), &list); + + /* xmlParseInNodeContext should not mutate the original document or node, + * so reassigning these pointers should be OK. The reason we're reassigning + * is because if there were errors, it's possible for the child pointers + * to be manipulated. */ + if (error != XML_ERR_OK) { + node->doc->children = doc_children; + node->children = node_children; + } + + /* make sure parent/child pointers are coherent so an unlink will work + * properly (#331) + */ + child_iter = node->doc->children ; + while (child_iter) { + child_iter->parent = (xmlNodePtr)node->doc; + child_iter = child_iter->next; + } + + xmlSetStructuredErrorFunc(NULL, NULL); + + /* + * Workaround for a libxml2 bug where a parsing error may leave a broken + * node reference in node->doc->children. 
+ * + * https://bugzilla.gnome.org/show_bug.cgi?id=668155 + * + * This workaround is limited to when a parse error occurs, the document + * went from having no children to having children, and the context node is + * part of a document fragment. + * + * TODO: This was fixed in libxml 2.8.0 by 71a243d + */ + if (error != XML_ERR_OK && doc_is_empty && node->doc->children != NULL) { + child_iter = node; + while (child_iter->parent) { + child_iter = child_iter->parent; + } + + if (child_iter->type == XML_DOCUMENT_FRAG_NODE) { + node->doc->children = NULL; + } + } + + /* FIXME: This probably needs to handle more constants... */ + switch (error) { + case XML_ERR_INTERNAL_ERROR: + case XML_ERR_NO_MEMORY: + rb_raise(rb_eRuntimeError, "error parsing fragment (%d)", error); + break; + default: + break; + } + + set = xmlXPathNodeSetCreate(NULL); + + while (list) { + tmp = list->next; + list->next = NULL; + xmlXPathNodeSetAddUnique(set, list); + noko_xml_document_pin_node(list); + list = tmp; + } + + return noko_xml_node_set_wrap(set, doc); +} + +/* :nodoc: */ +VALUE +rb_xml_node_data_ptr_eh(VALUE self) +{ + xmlNodePtr c_node; + Noko_Node_Get_Struct(self, xmlNode, c_node); + return c_node ? Qtrue : Qfalse; +} + +VALUE +noko_xml_node_wrap(VALUE rb_class, xmlNodePtr c_node) +{ + VALUE rb_document, rb_node_cache, rb_node; + nokogiriTuplePtr node_has_a_document; + xmlDocPtr c_doc; + + assert(c_node); + + if (c_node->type == XML_DOCUMENT_NODE || c_node->type == XML_HTML_DOCUMENT_NODE) { + return DOC_RUBY_OBJECT(c_node->doc); + } + + c_doc = c_node->doc; + + // Nodes yielded from XML::Reader don't have a fully-realized Document + node_has_a_document = DOC_RUBY_OBJECT_TEST(c_doc); + + if (c_node->_private && node_has_a_document) { + return (VALUE)c_node->_private; + } + + if (!RTEST(rb_class)) { + switch (c_node->type) { + case XML_ELEMENT_NODE: + rb_class = cNokogiriXmlElement; + break; + case XML_TEXT_NODE: + rb_class = cNokogiriXmlText; + break; + case XML_ATTRIBUTE_NODE: + rb_class = cNokogiriXmlAttr; + break; + case XML_ENTITY_REF_NODE: + rb_class = cNokogiriXmlEntityReference; + break; + case XML_COMMENT_NODE: + rb_class = cNokogiriXmlComment; + break; + case XML_DOCUMENT_FRAG_NODE: + rb_class = cNokogiriXmlDocumentFragment; + break; + case XML_PI_NODE: + rb_class = cNokogiriXmlProcessingInstruction; + break; + case XML_ENTITY_DECL: + rb_class = cNokogiriXmlEntityDecl; + break; + case XML_CDATA_SECTION_NODE: + rb_class = cNokogiriXmlCData; + break; + case XML_DTD_NODE: + rb_class = cNokogiriXmlDtd; + break; + case XML_ATTRIBUTE_DECL: + rb_class = cNokogiriXmlAttributeDecl; + break; + case XML_ELEMENT_DECL: + rb_class = cNokogiriXmlElementDecl; + break; + default: + rb_class = cNokogiriXmlNode; + } + } + + rb_node = _xml_node_alloc(rb_class); + _xml_node_data_ptr_set(rb_node, c_node); + + if (node_has_a_document) { + rb_document = DOC_RUBY_OBJECT(c_doc); + rb_node_cache = DOC_NODE_CACHE(c_doc); + rb_ary_push(rb_node_cache, rb_node); + rb_funcall(rb_document, id_decorate, 1, rb_node); + } + + return rb_node ; +} + + +/* + * return Array containing the node's attributes + */ +VALUE +noko_xml_node_attrs(xmlNodePtr c_node) +{ + VALUE rb_properties = rb_ary_new(); + xmlAttrPtr c_property; + + c_property = c_node->properties ; + while (c_property != NULL) { + rb_ary_push(rb_properties, noko_xml_node_wrap(Qnil, (xmlNodePtr)c_property)); + c_property = c_property->next ; + } + + return rb_properties; +} + +void +noko_init_xml_node(void) +{ + cNokogiriXmlNode = rb_define_class_under(mNokogiriXml, "Node", 
rb_cObject); + + rb_define_alloc_func(cNokogiriXmlNode, _xml_node_alloc); + + rb_define_singleton_method(cNokogiriXmlNode, "new", rb_xml_node_new, -1); + + rb_define_method(cNokogiriXmlNode, "add_namespace_definition", rb_xml_node_add_namespace_definition, 2); + rb_define_method(cNokogiriXmlNode, "attribute", rb_xml_node_attribute, 1); + rb_define_method(cNokogiriXmlNode, "attribute_nodes", rb_xml_node_attribute_nodes, 0); + rb_define_method(cNokogiriXmlNode, "attribute_with_ns", rb_xml_node_attribute_with_ns, 2); + rb_define_method(cNokogiriXmlNode, "blank?", rb_xml_node_blank_eh, 0); + rb_define_method(cNokogiriXmlNode, "child", rb_xml_node_child, 0); + rb_define_method(cNokogiriXmlNode, "children", rb_xml_node_children, 0); + rb_define_method(cNokogiriXmlNode, "content", rb_xml_node_content, 0); + rb_define_method(cNokogiriXmlNode, "create_external_subset", create_external_subset, 3); + rb_define_method(cNokogiriXmlNode, "create_internal_subset", create_internal_subset, 3); + rb_define_method(cNokogiriXmlNode, "data_ptr?", rb_xml_node_data_ptr_eh, 0); + rb_define_method(cNokogiriXmlNode, "document", rb_xml_node_document, 0); + rb_define_method(cNokogiriXmlNode, "element_children", rb_xml_node_element_children, 0); + rb_define_method(cNokogiriXmlNode, "encode_special_chars", encode_special_chars, 1); + rb_define_method(cNokogiriXmlNode, "external_subset", external_subset, 0); + rb_define_method(cNokogiriXmlNode, "first_element_child", rb_xml_node_first_element_child, 0); + rb_define_method(cNokogiriXmlNode, "internal_subset", internal_subset, 0); + rb_define_method(cNokogiriXmlNode, "key?", key_eh, 1); + rb_define_method(cNokogiriXmlNode, "lang", get_lang, 0); + rb_define_method(cNokogiriXmlNode, "lang=", set_lang, 1); + rb_define_method(cNokogiriXmlNode, "last_element_child", rb_xml_node_last_element_child, 0); + rb_define_method(cNokogiriXmlNode, "line", rb_xml_node_line, 0); + rb_define_method(cNokogiriXmlNode, "line=", rb_xml_node_line_set, 1); + rb_define_method(cNokogiriXmlNode, "namespace", rb_xml_node_namespace, 0); + rb_define_method(cNokogiriXmlNode, "namespace_definitions", namespace_definitions, 0); + rb_define_method(cNokogiriXmlNode, "namespace_scopes", rb_xml_node_namespace_scopes, 0); + rb_define_method(cNokogiriXmlNode, "namespaced_key?", namespaced_key_eh, 2); + rb_define_method(cNokogiriXmlNode, "native_content=", set_native_content, 1); + rb_define_method(cNokogiriXmlNode, "next_element", next_element, 0); + rb_define_method(cNokogiriXmlNode, "next_sibling", next_sibling, 0); + rb_define_method(cNokogiriXmlNode, "node_name", get_name, 0); + rb_define_method(cNokogiriXmlNode, "node_name=", set_name, 1); + rb_define_method(cNokogiriXmlNode, "node_type", node_type, 0); + rb_define_method(cNokogiriXmlNode, "parent", get_parent, 0); + rb_define_method(cNokogiriXmlNode, "path", rb_xml_node_path, 0); + rb_define_method(cNokogiriXmlNode, "pointer_id", rb_xml_node_pointer_id, 0); + rb_define_method(cNokogiriXmlNode, "previous_element", previous_element, 0); + rb_define_method(cNokogiriXmlNode, "previous_sibling", previous_sibling, 0); + rb_define_method(cNokogiriXmlNode, "unlink", unlink_node, 0); + + rb_define_protected_method(cNokogiriXmlNode, "initialize_copy_with_args", rb_xml_node_initialize_copy_with_args, 3); + + rb_define_private_method(cNokogiriXmlNode, "add_child_node", add_child, 1); + rb_define_private_method(cNokogiriXmlNode, "add_next_sibling_node", add_next_sibling, 1); + rb_define_private_method(cNokogiriXmlNode, "add_previous_sibling_node", 
add_previous_sibling, 1); + rb_define_private_method(cNokogiriXmlNode, "compare", compare, 1); + rb_define_private_method(cNokogiriXmlNode, "dump_html", dump_html, 0); + rb_define_private_method(cNokogiriXmlNode, "get", get, 1); + rb_define_private_method(cNokogiriXmlNode, "in_context", in_context, 2); + rb_define_private_method(cNokogiriXmlNode, "native_write_to", native_write_to, 4); + rb_define_private_method(cNokogiriXmlNode, "prepend_newline?", rb_prepend_newline, 0); + rb_define_private_method(cNokogiriXmlNode, "html_standard_serialize", html_standard_serialize, 1); + rb_define_private_method(cNokogiriXmlNode, "process_xincludes", noko_xml_node__process_xincludes, 1); + rb_define_private_method(cNokogiriXmlNode, "replace_node", replace, 1); + rb_define_private_method(cNokogiriXmlNode, "set", set, 2); + rb_define_private_method(cNokogiriXmlNode, "set_namespace", set_namespace, 1); + + id_decorate = rb_intern("decorate"); + id_decorate_bang = rb_intern("decorate!"); +} diff --git a/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/xml_node_set.c b/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/xml_node_set.c new file mode 100644 index 000000000..de1beeb56 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/xml_node_set.c @@ -0,0 +1,518 @@ +#include + +VALUE cNokogiriXmlNodeSet ; + +static ID decorate ; + +static void +Check_Node_Set_Node_Type(VALUE node) +{ + if (!(rb_obj_is_kind_of(node, cNokogiriXmlNode) || + rb_obj_is_kind_of(node, cNokogiriXmlNamespace))) { + rb_raise(rb_eArgError, "node must be a Nokogiri::XML::Node or Nokogiri::XML::Namespace"); + } +} + + +static +VALUE +ruby_object_get(xmlNodePtr c_node) +{ + /* see xmlElementType in libxml2 tree.h */ + switch (c_node->type) { + case XML_NAMESPACE_DECL: + /* _private is later in the namespace struct */ + return (VALUE)(((xmlNsPtr)c_node)->_private); + + case XML_DOCUMENT_NODE: + case XML_HTML_DOCUMENT_NODE: + /* in documents we use _private to store a tuple */ + if (DOC_RUBY_OBJECT_TEST(((xmlDocPtr)c_node))) { + return DOC_RUBY_OBJECT((xmlDocPtr)c_node); + } + return (VALUE)NULL; + + default: + return (VALUE)(c_node->_private); + } +} + + +static void +xml_node_set_mark(void *data) +{ + xmlNodeSetPtr node_set = data; + VALUE rb_node; + int jnode; + + for (jnode = 0; jnode < node_set->nodeNr; jnode++) { + rb_node = ruby_object_get(node_set->nodeTab[jnode]); + if (rb_node) { + rb_gc_mark(rb_node); + } + } +} + +static void +xml_node_set_deallocate(void *data) +{ + xmlNodeSetPtr node_set = data; + /* + * For reasons outlined in xml_namespace.c, here we reproduce xmlXPathFreeNodeSet() except for the + * offending call to xmlXPathNodeSetFreeNs(). 
+ */ + if (node_set->nodeTab != NULL) { + xmlFree(node_set->nodeTab); + } + + xmlFree(node_set); +} + +static const rb_data_type_t xml_node_set_type = { + .wrap_struct_name = "xmlNodeSet", + .function = { + .dmark = xml_node_set_mark, + .dfree = xml_node_set_deallocate, + }, + .flags = RUBY_TYPED_FREE_IMMEDIATELY, +}; + +static VALUE +xml_node_set_allocate(VALUE klass) +{ + return TypedData_Wrap_Struct(klass, &xml_node_set_type, xmlXPathNodeSetCreate(NULL)); +} + +/* :nodoc: */ +static VALUE +rb_xml_node_set_initialize_copy(VALUE rb_self, VALUE rb_other) +{ + xmlNodeSetPtr c_self, c_other; + VALUE rb_document; + + TypedData_Get_Struct(rb_self, xmlNodeSet, &xml_node_set_type, c_self); + TypedData_Get_Struct(rb_other, xmlNodeSet, &xml_node_set_type, c_other); + + xmlXPathNodeSetMerge(c_self, c_other); + + rb_document = rb_iv_get(rb_other, "@document"); + if (!NIL_P(rb_document)) { + rb_iv_set(rb_self, "@document", rb_document); + rb_funcall(rb_document, decorate, 1, rb_self); + } + + return rb_self; +} + +static void +xpath_node_set_del(xmlNodeSetPtr cur, xmlNodePtr val) +{ + /* + * For reasons outlined in xml_namespace.c, here we reproduce xmlXPathNodeSetDel() except for the + * offending call to xmlXPathNodeSetFreeNs(). + */ + int i; + + if (cur == NULL) { return; } + if (val == NULL) { return; } + + /* + * find node in nodeTab + */ + for (i = 0; i < cur->nodeNr; i++) + if (cur->nodeTab[i] == val) { break; } + + if (i >= cur->nodeNr) { /* not found */ + return; + } + cur->nodeNr--; + for (; i < cur->nodeNr; i++) { + cur->nodeTab[i] = cur->nodeTab[i + 1]; + } + cur->nodeTab[cur->nodeNr] = NULL; +} + +/* + * call-seq: + * length + * + * Get the length of the node set + */ +static VALUE +length(VALUE rb_self) +{ + xmlNodeSetPtr c_self; + + TypedData_Get_Struct(rb_self, xmlNodeSet, &xml_node_set_type, c_self); + + return c_self ? INT2NUM(c_self->nodeNr) : INT2NUM(0); +} + +/* + * call-seq: + * push(node) + * + * Append +node+ to the NodeSet. + */ +static VALUE +push(VALUE rb_self, VALUE rb_node) +{ + xmlNodeSetPtr c_self; + xmlNodePtr node; + + Check_Node_Set_Node_Type(rb_node); + + TypedData_Get_Struct(rb_self, xmlNodeSet, &xml_node_set_type, c_self); + Noko_Node_Get_Struct(rb_node, xmlNode, node); + + xmlXPathNodeSetAdd(c_self, node); + + return rb_self; +} + +/* + * call-seq: + * delete(node) + * + * Delete +node+ from the Nodeset, if it is a member. Returns the deleted node + * if found, otherwise returns nil. + */ +static VALUE +delete (VALUE rb_self, VALUE rb_node) +{ + xmlNodeSetPtr c_self; + xmlNodePtr node; + + Check_Node_Set_Node_Type(rb_node); + + TypedData_Get_Struct(rb_self, xmlNodeSet, &xml_node_set_type, c_self); + Noko_Node_Get_Struct(rb_node, xmlNode, node); + + if (xmlXPathNodeSetContains(c_self, node)) { + xpath_node_set_del(c_self, node); + return rb_node; + } + return Qnil ; +} + + +/* + * call-seq: + * &(node_set) + * + * Set Intersection — Returns a new NodeSet containing nodes common to the two NodeSets. 
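+ *
+ * A brief usage sketch (the markup is illustrative):
+ *
+ *   doc  = Nokogiri::XML("<root><a/><b/><c/></root>")
+ *   set1 = doc.css("a, b")
+ *   set2 = doc.css("b, c")
+ *   (set1 & set2).map(&:name) # => ["b"]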
+ */ +static VALUE +intersection(VALUE rb_self, VALUE rb_other) +{ + xmlNodeSetPtr c_self, c_other ; + xmlNodeSetPtr intersection; + + if (!rb_obj_is_kind_of(rb_other, cNokogiriXmlNodeSet)) { + rb_raise(rb_eArgError, "node_set must be a Nokogiri::XML::NodeSet"); + } + + TypedData_Get_Struct(rb_self, xmlNodeSet, &xml_node_set_type, c_self); + TypedData_Get_Struct(rb_other, xmlNodeSet, &xml_node_set_type, c_other); + + intersection = xmlXPathIntersection(c_self, c_other); + return noko_xml_node_set_wrap(intersection, rb_iv_get(rb_self, "@document")); +} + + +/* + * call-seq: + * include?(node) + * + * Returns true if any member of node set equals +node+. + */ +static VALUE +include_eh(VALUE rb_self, VALUE rb_node) +{ + xmlNodeSetPtr c_self; + xmlNodePtr node; + + Check_Node_Set_Node_Type(rb_node); + + TypedData_Get_Struct(rb_self, xmlNodeSet, &xml_node_set_type, c_self); + Noko_Node_Get_Struct(rb_node, xmlNode, node); + + return (xmlXPathNodeSetContains(c_self, node) ? Qtrue : Qfalse); +} + + +/* + * call-seq: + * |(node_set) + * + * Returns a new set built by merging the set and the elements of the given + * set. + */ +static VALUE +rb_xml_node_set_union(VALUE rb_self, VALUE rb_other) +{ + xmlNodeSetPtr c_self, c_other; + xmlNodeSetPtr c_new_node_set; + + if (!rb_obj_is_kind_of(rb_other, cNokogiriXmlNodeSet)) { + rb_raise(rb_eArgError, "node_set must be a Nokogiri::XML::NodeSet"); + } + + TypedData_Get_Struct(rb_self, xmlNodeSet, &xml_node_set_type, c_self); + TypedData_Get_Struct(rb_other, xmlNodeSet, &xml_node_set_type, c_other); + + c_new_node_set = xmlXPathNodeSetMerge(NULL, c_self); + c_new_node_set = xmlXPathNodeSetMerge(c_new_node_set, c_other); + + return noko_xml_node_set_wrap(c_new_node_set, rb_iv_get(rb_self, "@document")); +} + +/* + * call-seq: + * -(node_set) + * + * Difference - returns a new NodeSet that is a copy of this NodeSet, removing + * each item that also appears in +node_set+ + */ +static VALUE +minus(VALUE rb_self, VALUE rb_other) +{ + xmlNodeSetPtr c_self, c_other; + xmlNodeSetPtr new; + int j ; + + if (!rb_obj_is_kind_of(rb_other, cNokogiriXmlNodeSet)) { + rb_raise(rb_eArgError, "node_set must be a Nokogiri::XML::NodeSet"); + } + + TypedData_Get_Struct(rb_self, xmlNodeSet, &xml_node_set_type, c_self); + TypedData_Get_Struct(rb_other, xmlNodeSet, &xml_node_set_type, c_other); + + new = xmlXPathNodeSetMerge(NULL, c_self); + for (j = 0 ; j < c_other->nodeNr ; ++j) { + xpath_node_set_del(new, c_other->nodeTab[j]); + } + + return noko_xml_node_set_wrap(new, rb_iv_get(rb_self, "@document")); +} + + +static VALUE +index_at(VALUE rb_self, long offset) +{ + xmlNodeSetPtr c_self; + + TypedData_Get_Struct(rb_self, xmlNodeSet, &xml_node_set_type, c_self); + + if (offset >= c_self->nodeNr || abs((int)offset) > c_self->nodeNr) { + return Qnil; + } + + if (offset < 0) { offset += c_self->nodeNr ; } + + return noko_xml_node_wrap_node_set_result(c_self->nodeTab[offset], rb_self); +} + +static VALUE +subseq(VALUE rb_self, long beg, long len) +{ + long j; + xmlNodeSetPtr c_self; + xmlNodeSetPtr new_set ; + + TypedData_Get_Struct(rb_self, xmlNodeSet, &xml_node_set_type, c_self); + + if (beg > c_self->nodeNr) { return Qnil ; } + if (beg < 0 || len < 0) { return Qnil ; } + + if ((beg + len) > c_self->nodeNr) { + len = c_self->nodeNr - beg ; + } + + new_set = xmlXPathNodeSetCreate(NULL); + for (j = beg ; j < beg + len ; ++j) { + xmlXPathNodeSetAddUnique(new_set, c_self->nodeTab[j]); + } + return noko_xml_node_set_wrap(new_set, rb_iv_get(rb_self, "@document")); +} + +/* + * call-seq: + 
* [index] -> Node or nil + * [start, length] -> NodeSet or nil + * [range] -> NodeSet or nil + * slice(index) -> Node or nil + * slice(start, length) -> NodeSet or nil + * slice(range) -> NodeSet or nil + * + * Element reference - returns the node at +index+, or returns a NodeSet + * containing nodes starting at +start+ and continuing for +length+ elements, or + * returns a NodeSet containing nodes specified by +range+. Negative +indices+ + * count backward from the end of the +node_set+ (-1 is the last node). Returns + * nil if the +index+ (or +start+) are out of range. + */ +static VALUE +slice(int argc, VALUE *argv, VALUE rb_self) +{ + VALUE arg ; + long beg, len ; + xmlNodeSetPtr c_self; + + TypedData_Get_Struct(rb_self, xmlNodeSet, &xml_node_set_type, c_self); + + if (argc == 2) { + beg = NUM2LONG(argv[0]); + len = NUM2LONG(argv[1]); + if (beg < 0) { + beg += c_self->nodeNr ; + } + return subseq(rb_self, beg, len); + } + + if (argc != 1) { + rb_scan_args(argc, argv, "11", NULL, NULL); + } + arg = argv[0]; + + if (FIXNUM_P(arg)) { + return index_at(rb_self, FIX2LONG(arg)); + } + + /* if arg is Range */ + switch (rb_range_beg_len(arg, &beg, &len, (long)c_self->nodeNr, 0)) { + case Qfalse: + break; + case Qnil: + return Qnil; + default: + return subseq(rb_self, beg, len); + } + + return index_at(rb_self, NUM2LONG(arg)); +} + + +/* + * call-seq: + * to_a + * + * Return this list as an Array + */ +static VALUE +to_array(VALUE rb_self) +{ + xmlNodeSetPtr c_self ; + VALUE list; + int i; + + TypedData_Get_Struct(rb_self, xmlNodeSet, &xml_node_set_type, c_self); + + list = rb_ary_new2(c_self->nodeNr); + for (i = 0; i < c_self->nodeNr; i++) { + VALUE elt = noko_xml_node_wrap_node_set_result(c_self->nodeTab[i], rb_self); + rb_ary_push(list, elt); + } + + return list; +} + +/* + * call-seq: + * unlink + * + * Unlink this NodeSet and all Node objects it contains from their current context. + */ +static VALUE +unlink_nodeset(VALUE rb_self) +{ + xmlNodeSetPtr c_self; + int j, nodeNr ; + + TypedData_Get_Struct(rb_self, xmlNodeSet, &xml_node_set_type, c_self); + + nodeNr = c_self->nodeNr ; + for (j = 0 ; j < nodeNr ; j++) { + if (! 
NOKOGIRI_NAMESPACE_EH(c_self->nodeTab[j])) { + VALUE node ; + xmlNodePtr node_ptr; + node = noko_xml_node_wrap(Qnil, c_self->nodeTab[j]); + rb_funcall(node, rb_intern("unlink"), 0); /* modifies the C struct out from under the object */ + Noko_Node_Get_Struct(node, xmlNode, node_ptr); + c_self->nodeTab[j] = node_ptr ; + } + } + return rb_self ; +} + + +VALUE +noko_xml_node_set_wrap(xmlNodeSetPtr c_node_set, VALUE document) +{ + int j; + VALUE rb_node_set ; + + if (c_node_set == NULL) { + rb_node_set = xml_node_set_allocate(cNokogiriXmlNodeSet); + } else { + rb_node_set = TypedData_Wrap_Struct(cNokogiriXmlNodeSet, &xml_node_set_type, c_node_set); + } + + if (!NIL_P(document)) { + rb_iv_set(rb_node_set, "@document", document); + rb_funcall(document, decorate, 1, rb_node_set); + } + + if (c_node_set) { + /* create ruby objects for all the results, so they'll be marked during the GC mark phase */ + for (j = 0 ; j < c_node_set->nodeNr ; j++) { + noko_xml_node_wrap_node_set_result(c_node_set->nodeTab[j], rb_node_set); + } + } + + return rb_node_set ; +} + + +VALUE +noko_xml_node_wrap_node_set_result(xmlNodePtr node, VALUE node_set) +{ + if (NOKOGIRI_NAMESPACE_EH(node)) { + return noko_xml_namespace_wrap_xpath_copy((xmlNsPtr)node); + } else { + return noko_xml_node_wrap(Qnil, node); + } +} + + +xmlNodeSetPtr +noko_xml_node_set_unwrap(VALUE rb_node_set) +{ + xmlNodeSetPtr c_node_set; + TypedData_Get_Struct(rb_node_set, xmlNodeSet, &xml_node_set_type, c_node_set); + return c_node_set; +} + + +void +noko_init_xml_node_set(void) +{ + cNokogiriXmlNodeSet = rb_define_class_under(mNokogiriXml, "NodeSet", rb_cObject); + + rb_define_alloc_func(cNokogiriXmlNodeSet, xml_node_set_allocate); + + rb_define_method(cNokogiriXmlNodeSet, "&", intersection, 1); + rb_define_method(cNokogiriXmlNodeSet, "-", minus, 1); + rb_define_method(cNokogiriXmlNodeSet, "[]", slice, -1); + rb_define_method(cNokogiriXmlNodeSet, "delete", delete, 1); + rb_define_method(cNokogiriXmlNodeSet, "include?", include_eh, 1); + rb_define_method(cNokogiriXmlNodeSet, "length", length, 0); + rb_define_method(cNokogiriXmlNodeSet, "push", push, 1); + rb_define_method(cNokogiriXmlNodeSet, "slice", slice, -1); + rb_define_method(cNokogiriXmlNodeSet, "to_a", to_array, 0); + rb_define_method(cNokogiriXmlNodeSet, "unlink", unlink_nodeset, 0); + rb_define_method(cNokogiriXmlNodeSet, "|", rb_xml_node_set_union, 1); + + rb_define_private_method(cNokogiriXmlNodeSet, "initialize_copy", rb_xml_node_set_initialize_copy, 1); + + decorate = rb_intern("decorate"); +} diff --git a/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/xml_processing_instruction.c b/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/xml_processing_instruction.c new file mode 100644 index 000000000..6bcf15f19 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/xml_processing_instruction.c @@ -0,0 +1,54 @@ +#include + +VALUE cNokogiriXmlProcessingInstruction; + +/* + * call-seq: + * new(document, name, content) + * + * Create a new ProcessingInstruction element on the +document+ with +name+ + * and +content+ + */ +static VALUE +new (int argc, VALUE *argv, VALUE klass) +{ + xmlDocPtr xml_doc; + xmlNodePtr node; + VALUE document; + VALUE name; + VALUE content; + VALUE rest; + VALUE rb_node; + + rb_scan_args(argc, argv, "3*", &document, &name, &content, &rest); + + xml_doc = noko_xml_document_unwrap(document); + + node = xmlNewDocPI( + xml_doc, + (const xmlChar *)StringValueCStr(name), + (const 
xmlChar *)StringValueCStr(content) + ); + + noko_xml_document_pin_node(node); + + rb_node = noko_xml_node_wrap(klass, node); + rb_obj_call_init(rb_node, argc, argv); + + if (rb_block_given_p()) { rb_yield(rb_node); } + + return rb_node; +} + +void +noko_init_xml_processing_instruction(void) +{ + assert(cNokogiriXmlNode); + /* + * ProcessingInstruction represents a ProcessingInstruction node in an xml + * document. + */ + cNokogiriXmlProcessingInstruction = rb_define_class_under(mNokogiriXml, "ProcessingInstruction", cNokogiriXmlNode); + + rb_define_singleton_method(cNokogiriXmlProcessingInstruction, "new", new, -1); +} diff --git a/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/xml_reader.c b/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/xml_reader.c new file mode 100644 index 000000000..aa8574938 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/xml_reader.c @@ -0,0 +1,777 @@ +#include + +VALUE cNokogiriXmlReader; + +static void +xml_reader_deallocate(void *data) +{ + // free the document separately because we _may_ have triggered preservation by calling + // xmlTextReaderCurrentDoc during a read_more. + xmlTextReaderPtr reader = data; + xmlDocPtr doc = xmlTextReaderCurrentDoc(reader); + xmlFreeTextReader(reader); + if (doc) { + xmlFreeDoc(doc); + } +} + +static const rb_data_type_t xml_text_reader_type = { + .wrap_struct_name = "xmlTextReader", + .function = { + .dfree = xml_reader_deallocate, + }, + .flags = RUBY_TYPED_FREE_IMMEDIATELY | RUBY_TYPED_WB_PROTECTED, +}; + +static int +has_attributes(xmlTextReaderPtr reader) +{ + /* + * this implementation of xmlTextReaderHasAttributes explicitly includes + * namespaces and properties, because some earlier versions ignore + * namespaces. + */ + xmlNodePtr node ; + node = xmlTextReaderCurrentNode(reader); + if (node == NULL) { + return (0); + } + + if ((node->type == XML_ELEMENT_NODE) && + ((node->properties != NULL) || (node->nsDef != NULL))) { + return (1); + } + return (0); +} + +// TODO: merge this function into the `namespaces` method implementation +static void +Nokogiri_xml_node_namespaces(xmlNodePtr node, VALUE attr_hash) +{ + xmlNsPtr ns; + VALUE key; + + if (node->type != XML_ELEMENT_NODE) { return ; } + + ns = node->nsDef; + while (ns != NULL) { + + key = rb_enc_str_new_cstr(XMLNS_PREFIX, rb_utf8_encoding()); + if (ns->prefix) { + rb_str_cat_cstr(key, ":"); + rb_str_cat_cstr(key, (const char *)ns->prefix); + } + + key = rb_str_conv_enc(key, rb_utf8_encoding(), rb_default_internal_encoding()); + rb_hash_aset(attr_hash, + key, + (ns->href ? NOKOGIRI_STR_NEW2(ns->href) : Qnil) + ); + ns = ns->next ; + } +} + + +/* + * call-seq: + * default? + * + * Was an attribute generated from the default value in the DTD or schema? + */ +static VALUE +default_eh(VALUE self) +{ + xmlTextReaderPtr reader; + int eh; + + TypedData_Get_Struct(self, xmlTextReader, &xml_text_reader_type, reader); + eh = xmlTextReaderIsDefault(reader); + if (eh == 0) { return Qfalse; } + if (eh == 1) { return Qtrue; } + + return Qnil; +} + +/* + * call-seq: + * value? + * + * Does this node have a text value? + */ +static VALUE +value_eh(VALUE self) +{ + xmlTextReaderPtr reader; + int eh; + + TypedData_Get_Struct(self, xmlTextReader, &xml_text_reader_type, reader); + eh = xmlTextReaderHasValue(reader); + if (eh == 0) { return Qfalse; } + if (eh == 1) { return Qtrue; } + + return Qnil; +} + +/* + * call-seq: + * attributes? + * + * Does this node have attributes? 
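+ *
+ * A brief usage sketch (the markup is illustrative; namespace definitions also
+ * count as attributes here):
+ *
+ *   reader = Nokogiri::XML::Reader.new("<root><item id='1'/></root>")
+ *   reader.each { |node| puts "#{node.name}: #{node.attributes?}" }
+ *   # prints "root: false", "item: true", and "root: false" for the closing tag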
+ */ +static VALUE +attributes_eh(VALUE self) +{ + xmlTextReaderPtr reader; + int eh; + + TypedData_Get_Struct(self, xmlTextReader, &xml_text_reader_type, reader); + eh = has_attributes(reader); + if (eh == 0) { return Qfalse; } + if (eh == 1) { return Qtrue; } + + return Qnil; +} + +/* + * call-seq: + * namespaces + * + * Get a hash of namespaces for this Node + */ +static VALUE +rb_xml_reader_namespaces(VALUE rb_reader) +{ + VALUE rb_namespaces = rb_hash_new() ; + xmlTextReaderPtr c_reader; + xmlNodePtr c_node; + VALUE rb_errors; + + TypedData_Get_Struct(rb_reader, xmlTextReader, &xml_text_reader_type, c_reader); + + if (! has_attributes(c_reader)) { + return rb_namespaces ; + } + + rb_errors = rb_funcall(rb_reader, rb_intern("errors"), 0); + + xmlSetStructuredErrorFunc((void *)rb_errors, noko__error_array_pusher); + c_node = xmlTextReaderExpand(c_reader); + xmlSetStructuredErrorFunc(NULL, NULL); + + if (c_node == NULL) { + if (RARRAY_LEN(rb_errors) > 0) { + VALUE rb_error = rb_ary_entry(rb_errors, 0); + VALUE exception_message = rb_funcall(rb_error, rb_intern("to_s"), 0); + rb_exc_raise(rb_class_new_instance(1, &exception_message, cNokogiriXmlSyntaxError)); + } + return Qnil; + } + + Nokogiri_xml_node_namespaces(c_node, rb_namespaces); + + return rb_namespaces ; +} + +/* + :call-seq: attribute_hash() → Hash + + Get the attributes of the current node as a Hash of names and values. + + See related: #attributes and #namespaces + */ +static VALUE +rb_xml_reader_attribute_hash(VALUE rb_reader) +{ + VALUE rb_attributes = rb_hash_new(); + xmlTextReaderPtr c_reader; + xmlNodePtr c_node; + xmlAttrPtr c_property; + VALUE rb_errors; + + TypedData_Get_Struct(rb_reader, xmlTextReader, &xml_text_reader_type, c_reader); + + if (!has_attributes(c_reader)) { + return rb_attributes; + } + + rb_errors = rb_funcall(rb_reader, rb_intern("errors"), 0); + + xmlSetStructuredErrorFunc((void *)rb_errors, noko__error_array_pusher); + c_node = xmlTextReaderExpand(c_reader); + xmlSetStructuredErrorFunc(NULL, NULL); + + if (c_node == NULL) { + if (RARRAY_LEN(rb_errors) > 0) { + VALUE rb_error = rb_ary_entry(rb_errors, 0); + VALUE exception_message = rb_funcall(rb_error, rb_intern("to_s"), 0); + rb_exc_raise(rb_class_new_instance(1, &exception_message, cNokogiriXmlSyntaxError)); + } + return Qnil; + } + + c_property = c_node->properties; + while (c_property != NULL) { + VALUE rb_name = NOKOGIRI_STR_NEW2(c_property->name); + VALUE rb_value = Qnil; + xmlChar *c_value = xmlNodeGetContent((xmlNode *)c_property); + + if (c_value) { + rb_value = NOKOGIRI_STR_NEW2(c_value); + xmlFree(c_value); + } + + rb_hash_aset(rb_attributes, rb_name, rb_value); + + c_property = c_property->next; + } + + return rb_attributes; +} + +/* + * call-seq: + * attribute_at(index) + * + * Get the value of attribute at +index+ + */ +static VALUE +attribute_at(VALUE self, VALUE index) +{ + xmlTextReaderPtr reader; + xmlChar *value; + VALUE rb_value; + + TypedData_Get_Struct(self, xmlTextReader, &xml_text_reader_type, reader); + + if (NIL_P(index)) { return Qnil; } + index = rb_Integer(index); + + value = xmlTextReaderGetAttributeNo( + reader, + (int)NUM2INT(index) + ); + if (value == NULL) { return Qnil; } + + rb_value = NOKOGIRI_STR_NEW2(value); + xmlFree(value); + return rb_value; +} + +/* + * call-seq: + * attribute(name) + * + * Get the value of attribute named +name+ + */ +static VALUE +reader_attribute(VALUE self, VALUE name) +{ + xmlTextReaderPtr reader; + xmlChar *value ; + VALUE rb_value; + + TypedData_Get_Struct(self, xmlTextReader, 
&xml_text_reader_type, reader); + + if (NIL_P(name)) { return Qnil; } + name = StringValue(name) ; + + value = xmlTextReaderGetAttribute(reader, (xmlChar *)StringValueCStr(name)); + if (value == NULL) { return Qnil; } + + rb_value = NOKOGIRI_STR_NEW2(value); + xmlFree(value); + return rb_value; +} + +/* + * call-seq: + * attribute_count + * + * Get the number of attributes for the current node + */ +static VALUE +attribute_count(VALUE self) +{ + xmlTextReaderPtr reader; + int count; + + TypedData_Get_Struct(self, xmlTextReader, &xml_text_reader_type, reader); + count = xmlTextReaderAttributeCount(reader); + if (count == -1) { return Qnil; } + + return INT2NUM(count); +} + +/* + * call-seq: + * depth + * + * Get the depth of the node + */ +static VALUE +depth(VALUE self) +{ + xmlTextReaderPtr reader; + int depth; + + TypedData_Get_Struct(self, xmlTextReader, &xml_text_reader_type, reader); + depth = xmlTextReaderDepth(reader); + if (depth == -1) { return Qnil; } + + return INT2NUM(depth); +} + +/* + * call-seq: + * xml_version + * + * Get the XML version of the document being read + */ +static VALUE +xml_version(VALUE self) +{ + xmlTextReaderPtr reader; + const char *version; + + TypedData_Get_Struct(self, xmlTextReader, &xml_text_reader_type, reader); + version = (const char *)xmlTextReaderConstXmlVersion(reader); + if (version == NULL) { return Qnil; } + + return NOKOGIRI_STR_NEW2(version); +} + +/* + * call-seq: + * lang + * + * Get the xml:lang scope within which the node resides. + */ +static VALUE +lang(VALUE self) +{ + xmlTextReaderPtr reader; + const char *lang; + + TypedData_Get_Struct(self, xmlTextReader, &xml_text_reader_type, reader); + lang = (const char *)xmlTextReaderConstXmlLang(reader); + if (lang == NULL) { return Qnil; } + + return NOKOGIRI_STR_NEW2(lang); +} + +/* + * call-seq: + * value + * + * Get the text value of the node if present. Returns a utf-8 encoded string. + */ +static VALUE +value(VALUE self) +{ + xmlTextReaderPtr reader; + const char *value; + + TypedData_Get_Struct(self, xmlTextReader, &xml_text_reader_type, reader); + value = (const char *)xmlTextReaderConstValue(reader); + if (value == NULL) { return Qnil; } + + return NOKOGIRI_STR_NEW2(value); +} + +/* + * call-seq: + * prefix + * + * Get the shorthand reference to the namespace associated with the node. + */ +static VALUE +prefix(VALUE self) +{ + xmlTextReaderPtr reader; + const char *prefix; + + TypedData_Get_Struct(self, xmlTextReader, &xml_text_reader_type, reader); + prefix = (const char *)xmlTextReaderConstPrefix(reader); + if (prefix == NULL) { return Qnil; } + + return NOKOGIRI_STR_NEW2(prefix); +} + +/* + * call-seq: + * namespace_uri + * + * Get the URI defining the namespace associated with the node + */ +static VALUE +namespace_uri(VALUE self) +{ + xmlTextReaderPtr reader; + const char *uri; + + TypedData_Get_Struct(self, xmlTextReader, &xml_text_reader_type, reader); + uri = (const char *)xmlTextReaderConstNamespaceUri(reader); + if (uri == NULL) { return Qnil; } + + return NOKOGIRI_STR_NEW2(uri); +} + +/* + * call-seq: + * local_name + * + * Get the local name of the node + */ +static VALUE +local_name(VALUE self) +{ + xmlTextReaderPtr reader; + const char *name; + + TypedData_Get_Struct(self, xmlTextReader, &xml_text_reader_type, reader); + name = (const char *)xmlTextReaderConstLocalName(reader); + if (name == NULL) { return Qnil; } + + return NOKOGIRI_STR_NEW2(name); +} + +/* + * call-seq: + * name + * + * Get the name of the node. Returns a utf-8 encoded string. 
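+ *
+ * A brief usage sketch (the markup is illustrative); the name is the qualified
+ * name, and text nodes are reported as "#text":
+ *
+ *   reader = Nokogiri::XML::Reader.new("<x:root xmlns:x='urn:example'><child/>text</x:root>")
+ *   reader.map(&:name) # => ["x:root", "child", "#text", "x:root"]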
+ */ +static VALUE +name(VALUE self) +{ + xmlTextReaderPtr reader; + const char *name; + + TypedData_Get_Struct(self, xmlTextReader, &xml_text_reader_type, reader); + name = (const char *)xmlTextReaderConstName(reader); + if (name == NULL) { return Qnil; } + + return NOKOGIRI_STR_NEW2(name); +} + +/* + * call-seq: + * base_uri + * + * Get the xml:base of the node + */ +static VALUE +rb_xml_reader_base_uri(VALUE rb_reader) +{ + VALUE rb_base_uri; + xmlTextReaderPtr c_reader; + xmlChar *c_base_uri; + + TypedData_Get_Struct(rb_reader, xmlTextReader, &xml_text_reader_type, c_reader); + + c_base_uri = xmlTextReaderBaseUri(c_reader); + if (c_base_uri == NULL) { + return Qnil; + } + + rb_base_uri = NOKOGIRI_STR_NEW2(c_base_uri); + xmlFree(c_base_uri); + + return rb_base_uri; +} + +/* + * call-seq: + * state + * + * Get the state of the reader + */ +static VALUE +state(VALUE self) +{ + xmlTextReaderPtr reader; + TypedData_Get_Struct(self, xmlTextReader, &xml_text_reader_type, reader); + return INT2NUM(xmlTextReaderReadState(reader)); +} + +/* + * call-seq: + * node_type + * + * Get the type of readers current node + */ +static VALUE +node_type(VALUE self) +{ + xmlTextReaderPtr reader; + TypedData_Get_Struct(self, xmlTextReader, &xml_text_reader_type, reader); + return INT2NUM(xmlTextReaderNodeType(reader)); +} + +/* + * call-seq: + * read + * + * Move the Reader forward through the XML document. + */ +static VALUE +read_more(VALUE rb_reader) +{ + xmlTextReaderPtr c_reader; + libxmlStructuredErrorHandlerState handler_state; + + TypedData_Get_Struct(rb_reader, xmlTextReader, &xml_text_reader_type, c_reader); + + VALUE rb_errors = rb_funcall(rb_reader, rb_intern("errors"), 0); + noko__structured_error_func_save_and_set(&handler_state, (void *)rb_errors, noko__error_array_pusher); + + int status = xmlTextReaderRead(c_reader); + + noko__structured_error_func_restore(&handler_state); + + xmlDocPtr c_document = xmlTextReaderCurrentDoc(c_reader); + if (c_document && c_document->encoding == NULL) { + VALUE constructor_encoding = rb_iv_get(rb_reader, "@encoding"); + if (RTEST(constructor_encoding)) { + c_document->encoding = xmlStrdup(BAD_CAST StringValueCStr(constructor_encoding)); + } else { + rb_iv_set(rb_reader, "@encoding", NOKOGIRI_STR_NEW2("UTF-8")); + c_document->encoding = xmlStrdup(BAD_CAST "UTF-8"); + } + } + + if (status == 1) { return rb_reader; } + if (status == 0) { return Qnil; } + + /* if we're here, there was an error */ + VALUE exception = rb_funcall(cNokogiriXmlSyntaxError, rb_intern("aggregate"), 1, rb_errors); + if (RB_TEST(exception)) { + rb_exc_raise(exception); + } else { + rb_raise(rb_eRuntimeError, "Error pulling: %d", status); + } +} + +/* + * call-seq: + * inner_xml + * + * Read the contents of the current node, including child nodes and markup. + * Returns a utf-8 encoded string. + */ +static VALUE +inner_xml(VALUE self) +{ + xmlTextReaderPtr reader; + xmlChar *value; + VALUE str; + + TypedData_Get_Struct(self, xmlTextReader, &xml_text_reader_type, reader); + + value = xmlTextReaderReadInnerXml(reader); + + str = Qnil; + if (value) { + str = NOKOGIRI_STR_NEW2((char *)value); + xmlFree(value); + } + + return str; +} + +/* + * call-seq: + * outer_xml + * + * Read the current node and its contents, including child nodes and markup. + * Returns a utf-8 encoded string. 
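+ *
+ * A brief usage sketch contrasting this method with #inner_xml (the markup is
+ * illustrative):
+ *
+ *   reader = Nokogiri::XML::Reader.new("<root><a>hello</a></root>")
+ *   reader.read      # cursor is now on <root>
+ *   reader.inner_xml # => "<a>hello</a>"
+ *   reader.outer_xml # => "<root><a>hello</a></root>"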
+ */ +static VALUE +outer_xml(VALUE self) +{ + xmlTextReaderPtr reader; + xmlChar *value; + VALUE str = Qnil; + + TypedData_Get_Struct(self, xmlTextReader, &xml_text_reader_type, reader); + + value = xmlTextReaderReadOuterXml(reader); + + if (value) { + str = NOKOGIRI_STR_NEW2((char *)value); + xmlFree(value); + } + return str; +} + +/* + * call-seq: + * from_memory(string, url = nil, encoding = nil, options = 0) + * + * Create a new Reader to parse a String. + */ +static VALUE +from_memory(int argc, VALUE *argv, VALUE klass) +{ + /* TODO: deprecate this method, since Reader.new can handle both memory and IO. It can then + * become private. */ + VALUE rb_buffer, rb_url, encoding, rb_options; + xmlTextReaderPtr reader; + const char *c_url = NULL; + const char *c_encoding = NULL; + int c_options = 0; + VALUE rb_reader, args[3]; + + rb_scan_args(argc, argv, "13", &rb_buffer, &rb_url, &encoding, &rb_options); + + if (!RTEST(rb_buffer)) { rb_raise(rb_eArgError, "string cannot be nil"); } + if (RTEST(rb_url)) { c_url = StringValueCStr(rb_url); } + if (RTEST(encoding)) { c_encoding = StringValueCStr(encoding); } + if (RTEST(rb_options)) { c_options = (int)NUM2INT(rb_options); } + + reader = xmlReaderForMemory( + StringValuePtr(rb_buffer), + (int)RSTRING_LEN(rb_buffer), + c_url, + c_encoding, + c_options + ); + + if (reader == NULL) { + xmlFreeTextReader(reader); + rb_raise(rb_eRuntimeError, "couldn't create a parser"); + } + + rb_reader = TypedData_Wrap_Struct(klass, &xml_text_reader_type, reader); + args[0] = rb_buffer; + args[1] = rb_url; + args[2] = encoding; + rb_obj_call_init(rb_reader, 3, args); + + return rb_reader; +} + +/* + * call-seq: + * from_io(io, url = nil, encoding = nil, options = 0) + * + * Create a new Reader to parse an IO stream. + */ +static VALUE +from_io(int argc, VALUE *argv, VALUE klass) +{ + /* TODO: deprecate this method, since Reader.new can handle both memory and IO. It can then + * become private. */ + VALUE rb_io, rb_url, encoding, rb_options; + xmlTextReaderPtr reader; + const char *c_url = NULL; + const char *c_encoding = NULL; + int c_options = 0; + VALUE rb_reader, args[3]; + + rb_scan_args(argc, argv, "13", &rb_io, &rb_url, &encoding, &rb_options); + + if (!RTEST(rb_io)) { rb_raise(rb_eArgError, "io cannot be nil"); } + if (RTEST(rb_url)) { c_url = StringValueCStr(rb_url); } + if (RTEST(encoding)) { c_encoding = StringValueCStr(encoding); } + if (RTEST(rb_options)) { c_options = (int)NUM2INT(rb_options); } + + reader = xmlReaderForIO( + (xmlInputReadCallback)noko_io_read, + (xmlInputCloseCallback)noko_io_close, + (void *)rb_io, + c_url, + c_encoding, + c_options + ); + + if (reader == NULL) { + xmlFreeTextReader(reader); + rb_raise(rb_eRuntimeError, "couldn't create a parser"); + } + + rb_reader = TypedData_Wrap_Struct(klass, &xml_text_reader_type, reader); + args[0] = rb_io; + args[1] = rb_url; + args[2] = encoding; + rb_obj_call_init(rb_reader, 3, args); + + return rb_reader; +} + +/* + * call-seq: + * reader.empty_element? # => true or false + * + * Returns true if the current node is empty, otherwise false. 
+ */ +static VALUE +empty_element_p(VALUE self) +{ + xmlTextReaderPtr reader; + + TypedData_Get_Struct(self, xmlTextReader, &xml_text_reader_type, reader); + + if (xmlTextReaderIsEmptyElement(reader)) { + return Qtrue; + } + + return Qfalse; +} + +static VALUE +rb_xml_reader_encoding(VALUE rb_reader) +{ + xmlTextReaderPtr c_reader; + const char *parser_encoding; + VALUE constructor_encoding; + + TypedData_Get_Struct(rb_reader, xmlTextReader, &xml_text_reader_type, c_reader); + parser_encoding = (const char *)xmlTextReaderConstEncoding(c_reader); + if (parser_encoding) { + return NOKOGIRI_STR_NEW2(parser_encoding); + } + + constructor_encoding = rb_iv_get(rb_reader, "@encoding"); + if (RTEST(constructor_encoding)) { + return constructor_encoding; + } + + return Qnil; +} + +void +noko_init_xml_reader(void) +{ + cNokogiriXmlReader = rb_define_class_under(mNokogiriXml, "Reader", rb_cObject); + + rb_undef_alloc_func(cNokogiriXmlReader); + + rb_define_singleton_method(cNokogiriXmlReader, "from_memory", from_memory, -1); + rb_define_singleton_method(cNokogiriXmlReader, "from_io", from_io, -1); + + rb_define_method(cNokogiriXmlReader, "attribute", reader_attribute, 1); + rb_define_method(cNokogiriXmlReader, "attribute_at", attribute_at, 1); + rb_define_method(cNokogiriXmlReader, "attribute_count", attribute_count, 0); + rb_define_method(cNokogiriXmlReader, "attribute_hash", rb_xml_reader_attribute_hash, 0); + rb_define_method(cNokogiriXmlReader, "attributes?", attributes_eh, 0); + rb_define_method(cNokogiriXmlReader, "base_uri", rb_xml_reader_base_uri, 0); + rb_define_method(cNokogiriXmlReader, "default?", default_eh, 0); + rb_define_method(cNokogiriXmlReader, "depth", depth, 0); + rb_define_method(cNokogiriXmlReader, "empty_element?", empty_element_p, 0); + rb_define_method(cNokogiriXmlReader, "encoding", rb_xml_reader_encoding, 0); + rb_define_method(cNokogiriXmlReader, "inner_xml", inner_xml, 0); + rb_define_method(cNokogiriXmlReader, "lang", lang, 0); + rb_define_method(cNokogiriXmlReader, "local_name", local_name, 0); + rb_define_method(cNokogiriXmlReader, "name", name, 0); + rb_define_method(cNokogiriXmlReader, "namespace_uri", namespace_uri, 0); + rb_define_method(cNokogiriXmlReader, "namespaces", rb_xml_reader_namespaces, 0); + rb_define_method(cNokogiriXmlReader, "node_type", node_type, 0); + rb_define_method(cNokogiriXmlReader, "outer_xml", outer_xml, 0); + rb_define_method(cNokogiriXmlReader, "prefix", prefix, 0); + rb_define_method(cNokogiriXmlReader, "read", read_more, 0); + rb_define_method(cNokogiriXmlReader, "state", state, 0); + rb_define_method(cNokogiriXmlReader, "value", value, 0); + rb_define_method(cNokogiriXmlReader, "value?", value_eh, 0); + rb_define_method(cNokogiriXmlReader, "xml_version", xml_version, 0); +} diff --git a/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/xml_relax_ng.c b/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/xml_relax_ng.c new file mode 100644 index 000000000..07b0567a9 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/xml_relax_ng.c @@ -0,0 +1,149 @@ +#include + +VALUE cNokogiriXmlRelaxNG; + +static void +_noko_xml_relax_ng_deallocate(void *data) +{ + xmlRelaxNGPtr schema = data; + xmlRelaxNGFree(schema); +} + +static const rb_data_type_t xml_relax_ng_type = { + .wrap_struct_name = "xmlRelaxNG", + .function = { + .dfree = _noko_xml_relax_ng_deallocate, + }, + .flags = RUBY_TYPED_FREE_IMMEDIATELY | RUBY_TYPED_WB_PROTECTED, +}; + +static VALUE 
+noko_xml_relax_ng__validate_document(VALUE self, VALUE document) +{ + xmlDocPtr doc; + xmlRelaxNGPtr schema; + VALUE errors; + xmlRelaxNGValidCtxtPtr valid_ctxt; + + TypedData_Get_Struct(self, xmlRelaxNG, &xml_relax_ng_type, schema); + doc = noko_xml_document_unwrap(document); + + errors = rb_ary_new(); + + valid_ctxt = xmlRelaxNGNewValidCtxt(schema); + + if (NULL == valid_ctxt) { + /* we have a problem */ + rb_raise(rb_eRuntimeError, "Could not create a validation context"); + } + + xmlRelaxNGSetValidStructuredErrors( + valid_ctxt, + noko__error_array_pusher, + (void *)errors + ); + + xmlRelaxNGValidateDoc(valid_ctxt, doc); + + xmlRelaxNGFreeValidCtxt(valid_ctxt); + + return errors; +} + +static VALUE +_noko_xml_relax_ng_parse_schema( + VALUE rb_class, + xmlRelaxNGParserCtxtPtr c_parser_context, + VALUE rb_parse_options +) +{ + VALUE rb_errors; + VALUE rb_schema; + xmlRelaxNGPtr c_schema; + libxmlStructuredErrorHandlerState handler_state; + + if (NIL_P(rb_parse_options)) { + rb_parse_options = rb_const_get_at( + rb_const_get_at(mNokogiriXml, rb_intern("ParseOptions")), + rb_intern("DEFAULT_SCHEMA") + ); + } + + rb_errors = rb_ary_new(); + + noko__structured_error_func_save_and_set(&handler_state, (void *)rb_errors, noko__error_array_pusher); + xmlRelaxNGSetParserStructuredErrors( + c_parser_context, + noko__error_array_pusher, + (void *)rb_errors + ); + + c_schema = xmlRelaxNGParse(c_parser_context); + + xmlRelaxNGFreeParserCtxt(c_parser_context); + noko__structured_error_func_restore(&handler_state); + + if (NULL == c_schema) { + VALUE exception = rb_funcall(cNokogiriXmlSyntaxError, rb_intern("aggregate"), 1, rb_errors); + + if (RB_TEST(exception)) { + rb_exc_raise(exception); + } else { + rb_raise(rb_eRuntimeError, "Could not parse document"); + } + } + + rb_schema = TypedData_Wrap_Struct(rb_class, &xml_relax_ng_type, c_schema); + rb_iv_set(rb_schema, "@errors", rb_errors); + rb_iv_set(rb_schema, "@parse_options", rb_parse_options); + + return rb_schema; +} + +/* + * :call-seq: + * from_document(document) → Nokogiri::XML::RelaxNG + * from_document(document, parse_options) → Nokogiri::XML::RelaxNG + * + * Parse a RELAX NG schema definition from a Document to create a new Nokogiri::XML::RelaxNG. + * + * [Parameters] + * - +document+ (XML::Document) A document containing the RELAX NG schema definition + * - +parse_options+ (Nokogiri::XML::ParseOptions) + * Defaults to ParseOptions::DEFAULT_SCHEMA ⚠ Unused + * + * [Returns] Nokogiri::XML::RelaxNG + * + * ⚠ +parse_options+ is currently unused by this method and is present only as a placeholder for + * future functionality. + */ +static VALUE +noko_xml_relax_ng_s_from_document(int argc, VALUE *argv, VALUE rb_class) +{ + /* TODO: deprecate this method and put file-or-string logic into .new so that becomes the + * preferred entry point, and this can become a private method */ + VALUE rb_document; + VALUE rb_parse_options; + xmlDocPtr c_document; + xmlRelaxNGParserCtxtPtr c_parser_context; + + rb_scan_args(argc, argv, "11", &rb_document, &rb_parse_options); + + c_document = noko_xml_document_unwrap(rb_document); + c_document = c_document->doc; /* In case someone passes us a node. ugh. 
*/ + + c_parser_context = xmlRelaxNGNewDocParserCtxt(c_document); + + return _noko_xml_relax_ng_parse_schema(rb_class, c_parser_context, rb_parse_options); +} + +void +noko_init_xml_relax_ng(void) +{ + assert(cNokogiriXmlSchema); + cNokogiriXmlRelaxNG = rb_define_class_under(mNokogiriXml, "RelaxNG", cNokogiriXmlSchema); + + rb_define_singleton_method(cNokogiriXmlRelaxNG, "from_document", noko_xml_relax_ng_s_from_document, -1); + + rb_define_private_method(cNokogiriXmlRelaxNG, "validate_document", noko_xml_relax_ng__validate_document, 1); +} diff --git a/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/xml_sax_parser.c b/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/xml_sax_parser.c new file mode 100644 index 000000000..ec2d2ade4 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/xml_sax_parser.c @@ -0,0 +1,403 @@ +#include + +VALUE cNokogiriXmlSaxParser ; + +static ID id_start_document; +static ID id_end_document; +static ID id_start_element; +static ID id_end_element; +static ID id_start_element_namespace; +static ID id_end_element_namespace; +static ID id_comment; +static ID id_characters; +static ID id_xmldecl; +static ID id_error; +static ID id_warning; +static ID id_cdata_block; +static ID id_processing_instruction; +static ID id_reference; + +static size_t +xml_sax_parser_memsize(const void *data) +{ + return sizeof(xmlSAXHandler); +} + +/* Used by Nokogiri::XML::SAX::Parser and Nokogiri::HTML::SAX::Parser */ +static const rb_data_type_t xml_sax_parser_type = { + .wrap_struct_name = "xmlSAXHandler", + .function = { + .dfree = RUBY_TYPED_DEFAULT_FREE, + .dsize = xml_sax_parser_memsize + }, + .flags = RUBY_TYPED_FREE_IMMEDIATELY | RUBY_TYPED_WB_PROTECTED +}; + +static void +noko_xml_sax_parser_start_document_callback(void *ctx) +{ + xmlParserCtxtPtr ctxt = (xmlParserCtxtPtr)ctx; + VALUE self = (VALUE)ctxt->_private; + VALUE doc = rb_iv_get(self, "@document"); + + xmlSAX2StartDocument(ctx); + + if (ctxt->standalone != -1) { /* -1 means there was no declaration */ + VALUE encoding = Qnil ; + VALUE standalone = Qnil; + VALUE version; + + if (ctxt->encoding) { + encoding = NOKOGIRI_STR_NEW2(ctxt->encoding) ; + } else if (ctxt->input && ctxt->input->encoding) { // unnecessary after v2.12.0 / gnome/libxml2@ec7be506 + encoding = NOKOGIRI_STR_NEW2(ctxt->input->encoding) ; + } + + version = ctxt->version ? NOKOGIRI_STR_NEW2(ctxt->version) : Qnil; + + /* TODO try using xmlSAX2IsStandalone */ + switch (ctxt->standalone) { + case 0: + standalone = NOKOGIRI_STR_NEW2("no"); + break; + case 1: + standalone = NOKOGIRI_STR_NEW2("yes"); + break; + } + + rb_funcall(doc, id_xmldecl, 3, version, encoding, standalone); + } + + rb_funcall(doc, id_start_document, 0); +} + +static void +noko_xml_sax_parser_end_document_callback(void *ctx) +{ + xmlParserCtxtPtr ctxt = (xmlParserCtxtPtr)ctx; + VALUE self = (VALUE)ctxt->_private; + VALUE doc = rb_iv_get(self, "@document"); + + rb_funcall(doc, id_end_document, 0); +} + +static void +noko_xml_sax_parser_start_element_callback(void *ctx, const xmlChar *name, const xmlChar **atts) +{ + xmlParserCtxtPtr ctxt = (xmlParserCtxtPtr)ctx; + VALUE self = (VALUE)ctxt->_private; + VALUE doc = rb_iv_get(self, "@document"); + + VALUE attributes = rb_ary_new(); + const xmlChar *attr; + int i = 0; + if (atts) { + while ((attr = atts[i]) != NULL) { + const xmlChar *val = atts[i + 1]; + VALUE value = val != NULL ? 
NOKOGIRI_STR_NEW2(val) : Qnil; + rb_ary_push(attributes, rb_ary_new3(2, NOKOGIRI_STR_NEW2(attr), value)); + i += 2; + } + } + + rb_funcall(doc, + id_start_element, + 2, + NOKOGIRI_STR_NEW2(name), + attributes + ); +} + +static void +noko_xml_sax_parser_end_element_callback(void *ctx, const xmlChar *name) +{ + xmlParserCtxtPtr ctxt = (xmlParserCtxtPtr)ctx; + VALUE self = (VALUE)ctxt->_private; + VALUE doc = rb_iv_get(self, "@document"); + + rb_funcall(doc, id_end_element, 1, NOKOGIRI_STR_NEW2(name)); +} + +static VALUE +xml_sax_parser_marshal_attributes(int attributes_len, const xmlChar **c_attributes) +{ + VALUE rb_array = rb_ary_new2((long)attributes_len); + VALUE cNokogiriXmlSaxParserAttribute; + + cNokogiriXmlSaxParserAttribute = rb_const_get_at(cNokogiriXmlSaxParser, rb_intern("Attribute")); + if (c_attributes) { + /* Each attribute is an array of [localname, prefix, URI, value, end] */ + int i; + for (i = 0; i < attributes_len * 5; i += 5) { + VALUE rb_constructor_args[4], rb_attribute; + + rb_constructor_args[0] = RBSTR_OR_QNIL(c_attributes[i + 0]); /* localname */ + rb_constructor_args[1] = RBSTR_OR_QNIL(c_attributes[i + 1]); /* prefix */ + rb_constructor_args[2] = RBSTR_OR_QNIL(c_attributes[i + 2]); /* URI */ + + /* value */ + rb_constructor_args[3] = NOKOGIRI_STR_NEW((const char *)c_attributes[i + 3], + (c_attributes[i + 4] - c_attributes[i + 3])); + + rb_attribute = rb_class_new_instance(4, rb_constructor_args, cNokogiriXmlSaxParserAttribute); + rb_ary_push(rb_array, rb_attribute); + } + } + + return rb_array; +} + +static void +noko_xml_sax_parser_start_element_ns_callback( + void *ctx, + const xmlChar *localname, + const xmlChar *prefix, + const xmlChar *uri, + int nb_namespaces, + const xmlChar **namespaces, + int nb_attributes, + int nb_defaulted, + const xmlChar **attributes) +{ + xmlParserCtxtPtr ctxt = (xmlParserCtxtPtr)ctx; + VALUE self = (VALUE)ctxt->_private; + VALUE doc = rb_iv_get(self, "@document"); + + VALUE attribute_ary = xml_sax_parser_marshal_attributes(nb_attributes, attributes); + + VALUE ns_list = rb_ary_new2((long)nb_namespaces); + + if (namespaces) { + int i; + for (i = 0; i < nb_namespaces * 2; i += 2) { + rb_ary_push(ns_list, + rb_ary_new3((long)2, + RBSTR_OR_QNIL(namespaces[i + 0]), + RBSTR_OR_QNIL(namespaces[i + 1]) + ) + ); + } + } + + rb_funcall(doc, + id_start_element_namespace, + 5, + NOKOGIRI_STR_NEW2(localname), + attribute_ary, + RBSTR_OR_QNIL(prefix), + RBSTR_OR_QNIL(uri), + ns_list + ); +} + +/** + * end_element_ns was borrowed heavily from libxml-ruby. 
+ */ +static void +noko_xml_sax_parser_end_element_ns_callback( + void *ctx, + const xmlChar *localname, + const xmlChar *prefix, + const xmlChar *uri) +{ + xmlParserCtxtPtr ctxt = (xmlParserCtxtPtr)ctx; + VALUE self = (VALUE)ctxt->_private; + VALUE doc = rb_iv_get(self, "@document"); + + rb_funcall(doc, id_end_element_namespace, 3, + NOKOGIRI_STR_NEW2(localname), + RBSTR_OR_QNIL(prefix), + RBSTR_OR_QNIL(uri) + ); +} + +static void +noko_xml_sax_parser_characters_callback(void *ctx, const xmlChar *ch, int len) +{ + xmlParserCtxtPtr ctxt = (xmlParserCtxtPtr)ctx; + VALUE self = (VALUE)ctxt->_private; + VALUE doc = rb_iv_get(self, "@document"); + + VALUE str = NOKOGIRI_STR_NEW(ch, len); + rb_funcall(doc, id_characters, 1, str); +} + +static void +noko_xml_sax_parser_comment_callback(void *ctx, const xmlChar *value) +{ + xmlParserCtxtPtr ctxt = (xmlParserCtxtPtr)ctx; + VALUE self = (VALUE)ctxt->_private; + VALUE doc = rb_iv_get(self, "@document"); + + VALUE str = NOKOGIRI_STR_NEW2(value); + rb_funcall(doc, id_comment, 1, str); +} + +PRINTFLIKE_DECL(2, 3) +static void +noko_xml_sax_parser_warning_callback(void *ctx, const char *msg, ...) +{ + xmlParserCtxtPtr ctxt = (xmlParserCtxtPtr)ctx; + VALUE self = (VALUE)ctxt->_private; + VALUE doc = rb_iv_get(self, "@document"); + + VALUE rb_message; + +#ifdef TRUFFLERUBY_NOKOGIRI_SYSTEM_LIBRARIES + /* It is not currently possible to pass var args from native + functions to sulong, so we work around the issue here. */ + rb_message = rb_sprintf("warning_func: %s", msg); +#else + va_list args; + va_start(args, msg); + rb_message = rb_vsprintf(msg, args); + va_end(args); +#endif + + rb_funcall(doc, id_warning, 1, rb_message); +} + +PRINTFLIKE_DECL(2, 3) +static void +noko_xml_sax_parser_error_callback(void *ctx, const char *msg, ...) +{ + xmlParserCtxtPtr ctxt = (xmlParserCtxtPtr)ctx; + VALUE self = (VALUE)ctxt->_private; + VALUE doc = rb_iv_get(self, "@document"); + + VALUE rb_message; + +#ifdef TRUFFLERUBY_NOKOGIRI_SYSTEM_LIBRARIES + /* It is not currently possible to pass var args from native + functions to sulong, so we work around the issue here. */ + rb_message = rb_sprintf("error_func: %s", msg); +#else + va_list args; + va_start(args, msg); + rb_message = rb_vsprintf(msg, args); + va_end(args); +#endif + + rb_funcall(doc, id_error, 1, rb_message); +} + +static void +noko_xml_sax_parser_cdata_block_callback(void *ctx, const xmlChar *value, int len) +{ + xmlParserCtxtPtr ctxt = (xmlParserCtxtPtr)ctx; + VALUE self = (VALUE)ctxt->_private; + VALUE doc = rb_iv_get(self, "@document"); + + VALUE string = NOKOGIRI_STR_NEW(value, len); + rb_funcall(doc, id_cdata_block, 1, string); +} + +static void +noko_xml_sax_parser_processing_instruction_callback(void *ctx, const xmlChar *name, const xmlChar *content) +{ + xmlParserCtxtPtr ctxt = (xmlParserCtxtPtr)ctx; + VALUE self = (VALUE)ctxt->_private; + VALUE doc = rb_iv_get(self, "@document"); + + VALUE rb_content = content ? 
NOKOGIRI_STR_NEW2(content) : Qnil; + + rb_funcall(doc, + id_processing_instruction, + 2, + NOKOGIRI_STR_NEW2(name), + rb_content + ); +} + +static void +noko_xml_sax_parser_reference_callback(void *ctx, const xmlChar *name) +{ + xmlParserCtxtPtr ctxt = (xmlParserCtxtPtr)ctx; + xmlEntityPtr entity = xmlSAX2GetEntity(ctxt, name); + + VALUE self = (VALUE)ctxt->_private; + VALUE doc = rb_iv_get(self, "@document"); + + if (entity && entity->content) { + rb_funcall(doc, id_reference, 2, NOKOGIRI_STR_NEW2(entity->name), NOKOGIRI_STR_NEW2(entity->content)); + } else { + rb_funcall(doc, id_reference, 2, NOKOGIRI_STR_NEW2(name), Qnil); + } +} + +static VALUE +noko_xml_sax_parser__initialize_native(VALUE self) +{ + xmlSAXHandlerPtr handler = noko_xml_sax_parser_unwrap(self); + + handler->startDocument = noko_xml_sax_parser_start_document_callback; + handler->endDocument = noko_xml_sax_parser_end_document_callback; + handler->startElement = noko_xml_sax_parser_start_element_callback; + handler->endElement = noko_xml_sax_parser_end_element_callback; + handler->startElementNs = noko_xml_sax_parser_start_element_ns_callback; + handler->endElementNs = noko_xml_sax_parser_end_element_ns_callback; + handler->characters = noko_xml_sax_parser_characters_callback; + handler->comment = noko_xml_sax_parser_comment_callback; + handler->warning = noko_xml_sax_parser_warning_callback; + handler->error = noko_xml_sax_parser_error_callback; + handler->cdataBlock = noko_xml_sax_parser_cdata_block_callback; + handler->processingInstruction = noko_xml_sax_parser_processing_instruction_callback; + handler->reference = noko_xml_sax_parser_reference_callback; + + /* use some of libxml2's default callbacks to managed DTDs and entities */ + handler->getEntity = xmlSAX2GetEntity; + handler->internalSubset = xmlSAX2InternalSubset; + handler->externalSubset = xmlSAX2ExternalSubset; + handler->isStandalone = xmlSAX2IsStandalone; + handler->hasInternalSubset = xmlSAX2HasInternalSubset; + handler->hasExternalSubset = xmlSAX2HasExternalSubset; + handler->resolveEntity = xmlSAX2ResolveEntity; + handler->getParameterEntity = xmlSAX2GetParameterEntity; + handler->entityDecl = xmlSAX2EntityDecl; + handler->unparsedEntityDecl = xmlSAX2UnparsedEntityDecl; + + handler->initialized = XML_SAX2_MAGIC; + + return self; +} + +static VALUE +noko_xml_sax_parser_allocate(VALUE klass) +{ + xmlSAXHandlerPtr handler; + return TypedData_Make_Struct(klass, xmlSAXHandler, &xml_sax_parser_type, handler); +} + +xmlSAXHandlerPtr +noko_xml_sax_parser_unwrap(VALUE rb_sax_handler) +{ + xmlSAXHandlerPtr c_sax_handler; + TypedData_Get_Struct(rb_sax_handler, xmlSAXHandler, &xml_sax_parser_type, c_sax_handler); + return c_sax_handler; +} + +void +noko_init_xml_sax_parser(void) +{ + cNokogiriXmlSaxParser = rb_define_class_under(mNokogiriXmlSax, "Parser", rb_cObject); + + rb_define_alloc_func(cNokogiriXmlSaxParser, noko_xml_sax_parser_allocate); + + rb_define_private_method(cNokogiriXmlSaxParser, "initialize_native", + noko_xml_sax_parser__initialize_native, 0); + + id_start_document = rb_intern("start_document"); + id_end_document = rb_intern("end_document"); + id_start_element = rb_intern("start_element"); + id_end_element = rb_intern("end_element"); + id_comment = rb_intern("comment"); + id_characters = rb_intern("characters"); + id_xmldecl = rb_intern("xmldecl"); + id_error = rb_intern("error"); + id_warning = rb_intern("warning"); + id_cdata_block = rb_intern("cdata_block"); + id_start_element_namespace = rb_intern("start_element_namespace"); + 
id_end_element_namespace = rb_intern("end_element_namespace"); + id_processing_instruction = rb_intern("processing_instruction"); + id_reference = rb_intern("reference"); +} diff --git a/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/xml_sax_parser_context.c b/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/xml_sax_parser_context.c new file mode 100644 index 000000000..0d2b65b59 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/xml_sax_parser_context.c @@ -0,0 +1,396 @@ +#include + +VALUE cNokogiriXmlSaxParserContext ; + +static ID id_read; + +static void +xml_sax_parser_context_type_free(void *data) +{ + xmlParserCtxtPtr ctxt = data; + ctxt->sax = NULL; + if (ctxt->myDoc) { + xmlFreeDoc(ctxt->myDoc); + } + if (ctxt) { + xmlFreeParserCtxt(ctxt); + } +} + +/* + * note that htmlParserCtxtPtr == xmlParserCtxtPtr and xmlFreeParserCtxt() == htmlFreeParserCtxt() + * so we use this type for both XML::SAX::ParserContext and HTML::SAX::ParserContext + */ +static const rb_data_type_t xml_sax_parser_context_type = { + .wrap_struct_name = "xmlParserCtxt", + .function = { + .dfree = xml_sax_parser_context_type_free, + }, + .flags = RUBY_TYPED_FREE_IMMEDIATELY | RUBY_TYPED_WB_PROTECTED, +}; + +xmlParserCtxtPtr +noko_xml_sax_parser_context_unwrap(VALUE rb_context) +{ + xmlParserCtxtPtr c_context; + TypedData_Get_Struct(rb_context, xmlParserCtxt, &xml_sax_parser_context_type, c_context); + return c_context; +} + +VALUE +noko_xml_sax_parser_context_wrap(VALUE klass, xmlParserCtxtPtr c_context) +{ + return TypedData_Wrap_Struct(klass, &xml_sax_parser_context_type, c_context); +} + +void +noko_xml_sax_parser_context_set_encoding(xmlParserCtxtPtr c_context, VALUE rb_encoding) +{ + if (!NIL_P(rb_encoding)) { + VALUE rb_encoding_name = rb_funcall(rb_encoding, rb_intern("name"), 0); + + char *encoding_name = StringValueCStr(rb_encoding_name); + if (encoding_name) { + libxmlStructuredErrorHandlerState handler_state; + VALUE rb_errors = rb_ary_new(); + + noko__structured_error_func_save_and_set(&handler_state, (void *)rb_errors, noko__error_array_pusher); + + int result = xmlSwitchEncodingName(c_context, encoding_name); + + noko__structured_error_func_restore(&handler_state); + + if (result != 0) { + xmlFreeParserCtxt(c_context); + + VALUE exception = rb_funcall(cNokogiriXmlSyntaxError, rb_intern("aggregate"), 1, rb_errors); + if (!NIL_P(exception)) { + rb_exc_raise(exception); + } else { + rb_raise(rb_eRuntimeError, "could not set encoding"); + } + } + } + } +} + +/* :nodoc: */ +static VALUE +noko_xml_sax_parser_context_s_native_io(VALUE rb_class, VALUE rb_io, VALUE rb_encoding) +{ + if (!rb_respond_to(rb_io, id_read)) { + rb_raise(rb_eTypeError, "argument expected to respond to :read"); + } + + if (!NIL_P(rb_encoding) && !rb_obj_is_kind_of(rb_encoding, rb_cEncoding)) { + rb_raise(rb_eTypeError, "argument must be an Encoding object"); + } + + xmlParserCtxtPtr c_context = + xmlCreateIOParserCtxt(NULL, NULL, + (xmlInputReadCallback)noko_io_read, + (xmlInputCloseCallback)noko_io_close, + (void *)rb_io, XML_CHAR_ENCODING_NONE); + if (!c_context) { + rb_raise(rb_eRuntimeError, "failed to create xml sax parser context"); + } + + noko_xml_sax_parser_context_set_encoding(c_context, rb_encoding); + + if (c_context->sax) { + xmlFree(c_context->sax); + c_context->sax = NULL; + } + + VALUE rb_context = noko_xml_sax_parser_context_wrap(rb_class, c_context); + rb_iv_set(rb_context, "@input", rb_io); + + return rb_context; +} 
+ +/* :nodoc: */ +static VALUE +noko_xml_sax_parser_context_s_native_file(VALUE rb_class, VALUE rb_path, VALUE rb_encoding) +{ + if (!NIL_P(rb_encoding) && !rb_obj_is_kind_of(rb_encoding, rb_cEncoding)) { + rb_raise(rb_eTypeError, "argument must be an Encoding object"); + } + + xmlParserCtxtPtr c_context = xmlCreateFileParserCtxt(StringValueCStr(rb_path)); + if (!c_context) { + rb_raise(rb_eRuntimeError, "failed to create xml sax parser context"); + } + + noko_xml_sax_parser_context_set_encoding(c_context, rb_encoding); + + if (c_context->sax) { + xmlFree(c_context->sax); + c_context->sax = NULL; + } + + return noko_xml_sax_parser_context_wrap(rb_class, c_context); +} + +/* :nodoc: */ +static VALUE +noko_xml_sax_parser_context_s_native_memory(VALUE rb_class, VALUE rb_input, VALUE rb_encoding) +{ + Check_Type(rb_input, T_STRING); + if (!(int)RSTRING_LEN(rb_input)) { + rb_raise(rb_eRuntimeError, "input string cannot be empty"); + } + + if (!NIL_P(rb_encoding) && !rb_obj_is_kind_of(rb_encoding, rb_cEncoding)) { + rb_raise(rb_eTypeError, "argument must be an Encoding object"); + } + + xmlParserCtxtPtr c_context = + xmlCreateMemoryParserCtxt(StringValuePtr(rb_input), (int)RSTRING_LEN(rb_input)); + if (!c_context) { + rb_raise(rb_eRuntimeError, "failed to create xml sax parser context"); + } + + noko_xml_sax_parser_context_set_encoding(c_context, rb_encoding); + + if (c_context->sax) { + xmlFree(c_context->sax); + c_context->sax = NULL; + } + + VALUE rb_context = noko_xml_sax_parser_context_wrap(rb_class, c_context); + rb_iv_set(rb_context, "@input", rb_input); + + return rb_context; +} + +/* + * call-seq: + * parse_with(sax_handler) + * + * Use +sax_handler+ and parse the current document + * + * 💡 Calling this method directly is discouraged. Use Nokogiri::XML::SAX::Parser methods which are + * more convenient for most use cases. + */ +static VALUE +noko_xml_sax_parser_context__parse_with(VALUE rb_context, VALUE rb_sax_parser) +{ + xmlParserCtxtPtr c_context; + xmlSAXHandlerPtr sax; + + if (!rb_obj_is_kind_of(rb_sax_parser, cNokogiriXmlSaxParser)) { + rb_raise(rb_eArgError, "argument must be a Nokogiri::XML::SAX::Parser"); + } + + c_context = noko_xml_sax_parser_context_unwrap(rb_context); + sax = noko_xml_sax_parser_unwrap(rb_sax_parser); + + c_context->sax = sax; + c_context->userData = c_context; /* so we can use libxml2/SAX2.c handlers if we want to */ + c_context->_private = (void *)rb_sax_parser; + + xmlSetStructuredErrorFunc(NULL, NULL); + + /* although we're calling back into Ruby here, we don't need to worry about exceptions, because we + * don't have any cleanup to do. The only memory we need to free is handled by + * xml_sax_parser_context_type_free */ + xmlParseDocument(c_context); + + return Qnil; +} + +/* + * call-seq: + * replace_entities=(value) + * + * See Document@Entity+Handling for an explanation of the behavior controlled by this flag. + * + * [Parameters] + * - +value+ (Boolean) Whether external parsed entities will be resolved. + * + * ⚠ It is UNSAFE to set this option to +true+ when parsing untrusted documents. The option + * defaults to +false+ for this reason. + * + * This option is perhaps misnamed by the libxml2 author, since it controls resolution and not + * replacement. 
+ * + * [Example] + * Because this class is generally not instantiated directly, you would typically set this option + * via the block argument to Nokogiri::XML::SAX::Parser.parse et al: + * + * parser = Nokogiri::XML::SAX::Parser.new(document_handler) + * parser.parse(xml) do |ctx| + * ctx.replace_entities = true # this is UNSAFE for untrusted documents! + * end + */ +static VALUE +noko_xml_sax_parser_context__replace_entities_set(VALUE rb_context, VALUE rb_value) +{ + int error; + xmlParserCtxtPtr ctxt = noko_xml_sax_parser_context_unwrap(rb_context); + + if (RB_TEST(rb_value)) { + error = xmlCtxtSetOptions(ctxt, xmlCtxtGetOptions(ctxt) | XML_PARSE_NOENT); + } else { + error = xmlCtxtSetOptions(ctxt, xmlCtxtGetOptions(ctxt) & ~XML_PARSE_NOENT); + } + + if (error) { + rb_raise(rb_eRuntimeError, "failed to set parser context options (%x)", error); + } + + return rb_value; +} + +/* + * call-seq: + * replace_entities + * + * See Document@Entity+Handling for an explanation of the behavior controlled by this flag. + * + * [Returns] (Boolean) Value of the parse option. (Default +false+) + * + * This option is perhaps misnamed by the libxml2 author, since it controls resolution and not + * replacement. + */ +static VALUE +noko_xml_sax_parser_context__replace_entities_get(VALUE rb_context) +{ + xmlParserCtxtPtr ctxt = noko_xml_sax_parser_context_unwrap(rb_context); + + if (xmlCtxtGetOptions(ctxt) & XML_PARSE_NOENT) { + return Qtrue; + } else { + return Qfalse; + } +} + +/* + * call-seq: line + * + * [Returns] (Integer) the line number of the line being currently parsed. + */ +static VALUE +noko_xml_sax_parser_context__line(VALUE rb_context) +{ + xmlParserInputPtr io; + xmlParserCtxtPtr ctxt = noko_xml_sax_parser_context_unwrap(rb_context); + + io = ctxt->input; + if (io) { + return INT2NUM(io->line); + } + + return Qnil; +} + +/* + * call-seq: column + * + * [Returns] (Integer) the column number of the column being currently parsed. + */ +static VALUE +noko_xml_sax_parser_context__column(VALUE rb_context) +{ + xmlParserCtxtPtr ctxt = noko_xml_sax_parser_context_unwrap(rb_context); + xmlParserInputPtr io; + + io = ctxt->input; + if (io) { + return INT2NUM(io->col); + } + + return Qnil; +} + +/* + * call-seq: + * recovery=(value) + * + * Controls whether this parser will recover from parsing errors. If set to +true+, the parser will + * invoke the SAX::Document#error callback and continue processing the file. If set to +false+, the + * parser will stop processing the file on the first parsing error. + * + * [Parameters] + * - +value+ (Boolean) Recover from parsing errors. (Default is +false+ for XML and +true+ for HTML.) + * + * [Returns] (Boolean) The passed +value+. 
+ * + * [Example] + * Because this class is generally not instantiated directly, you would typically set this option + * via the block argument to Nokogiri::XML::SAX::Parser.parse et al: + * + * parser = Nokogiri::XML::SAX::Parser.new(document_handler) + * parser.parse(xml) do |ctx| + * ctx.recovery = true + * end + */ +static VALUE +noko_xml_sax_parser_context__recovery_set(VALUE rb_context, VALUE rb_value) +{ + int error; + xmlParserCtxtPtr ctxt = noko_xml_sax_parser_context_unwrap(rb_context); + + if (RB_TEST(rb_value)) { + error = xmlCtxtSetOptions(ctxt, xmlCtxtGetOptions(ctxt) | XML_PARSE_RECOVER); + } else { + error = xmlCtxtSetOptions(ctxt, xmlCtxtGetOptions(ctxt) & ~XML_PARSE_RECOVER); + } + + if (error) { + rb_raise(rb_eRuntimeError, "failed to set parser context options (%x)", error); + } + + return rb_value; +} + +/* + * call-seq: + * recovery + * + * Inspect whether this parser will recover from parsing errors. If set to +true+, the parser will + * invoke the SAX::Document#error callback and continue processing the file. If set to +false+, the + * parser will stop processing the file on the first parsing error. + * + * [Returns] (Boolean) Whether this parser will recover from parsing errors. + * + * Default is +false+ for XML and +true+ for HTML. + */ +static VALUE +noko_xml_sax_parser_context__recovery_get(VALUE rb_context) +{ + xmlParserCtxtPtr ctxt = noko_xml_sax_parser_context_unwrap(rb_context); + + if (xmlCtxtGetOptions(ctxt) & XML_PARSE_RECOVER) { + return Qtrue; + } else { + return Qfalse; + } +} + +void +noko_init_xml_sax_parser_context(void) +{ + cNokogiriXmlSaxParserContext = rb_define_class_under(mNokogiriXmlSax, "ParserContext", rb_cObject); + + rb_undef_alloc_func(cNokogiriXmlSaxParserContext); + + rb_define_singleton_method(cNokogiriXmlSaxParserContext, "native_io", + noko_xml_sax_parser_context_s_native_io, 2); + rb_define_singleton_method(cNokogiriXmlSaxParserContext, "native_memory", + noko_xml_sax_parser_context_s_native_memory, 2); + rb_define_singleton_method(cNokogiriXmlSaxParserContext, "native_file", + noko_xml_sax_parser_context_s_native_file, 2); + + rb_define_method(cNokogiriXmlSaxParserContext, "parse_with", noko_xml_sax_parser_context__parse_with, 1); + rb_define_method(cNokogiriXmlSaxParserContext, "replace_entities=", + noko_xml_sax_parser_context__replace_entities_set, 1); + rb_define_method(cNokogiriXmlSaxParserContext, "replace_entities", + noko_xml_sax_parser_context__replace_entities_get, 0); + rb_define_method(cNokogiriXmlSaxParserContext, "recovery=", noko_xml_sax_parser_context__recovery_set, 1); + rb_define_method(cNokogiriXmlSaxParserContext, "recovery", noko_xml_sax_parser_context__recovery_get, 0); + rb_define_method(cNokogiriXmlSaxParserContext, "line", noko_xml_sax_parser_context__line, 0); + rb_define_method(cNokogiriXmlSaxParserContext, "column", noko_xml_sax_parser_context__column, 0); + + id_read = rb_intern("read"); +} diff --git a/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/xml_sax_push_parser.c b/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/xml_sax_push_parser.c new file mode 100644 index 000000000..e6bffbfe3 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/xml_sax_push_parser.c @@ -0,0 +1,206 @@ +#include + +VALUE cNokogiriXmlSaxPushParser ; + +static void +xml_sax_push_parser_free(void *data) +{ + xmlParserCtxtPtr ctx = data; + if (ctx->myDoc) { + xmlFreeDoc(ctx->myDoc); + } + if (ctx) { + xmlFreeParserCtxt(ctx); + } 
+} + +static const rb_data_type_t xml_sax_push_parser_type = { + .wrap_struct_name = "xmlParserCtxt", + .function = { + .dfree = xml_sax_push_parser_free, + }, + .flags = RUBY_TYPED_FREE_IMMEDIATELY | RUBY_TYPED_WB_PROTECTED, +}; + +static VALUE +xml_sax_push_parser_allocate(VALUE klass) +{ + return TypedData_Wrap_Struct(klass, &xml_sax_push_parser_type, NULL); +} + +xmlParserCtxtPtr +noko_xml_sax_push_parser_unwrap(VALUE rb_parser) +{ + xmlParserCtxtPtr c_parser; + TypedData_Get_Struct(rb_parser, xmlParserCtxt, &xml_sax_push_parser_type, c_parser); + return c_parser; +} + +/* + * Write +chunk+ to PushParser. +last_chunk+ triggers the end_document handle + */ +static VALUE +noko_xml_sax_push_parser__native_write(VALUE self, VALUE _chunk, VALUE _last_chunk) +{ + xmlParserCtxtPtr ctx; + const char *chunk = NULL; + int size = 0; + + ctx = noko_xml_sax_push_parser_unwrap(self); + + if (Qnil != _chunk) { + chunk = StringValuePtr(_chunk); + size = (int)RSTRING_LEN(_chunk); + } + + xmlSetStructuredErrorFunc(NULL, NULL); + + if (xmlParseChunk(ctx, chunk, size, Qtrue == _last_chunk ? 1 : 0)) { + if (!(xmlCtxtGetOptions(ctx) & XML_PARSE_RECOVER)) { + xmlErrorConstPtr e = xmlCtxtGetLastError(ctx); + noko__error_raise(NULL, e); + } + } + + return self; +} + +/* + * call-seq: + * initialize_native(xml_sax, filename) + * + * Initialize the push parser with +xml_sax+ using +filename+ + */ +static VALUE +noko_xml_sax_push_parser__initialize_native(VALUE self, VALUE _xml_sax, VALUE _filename) +{ + xmlSAXHandlerPtr sax; + const char *filename = NULL; + xmlParserCtxtPtr ctx; + + sax = noko_xml_sax_parser_unwrap(_xml_sax); + + if (_filename != Qnil) { filename = StringValueCStr(_filename); } + + ctx = xmlCreatePushParserCtxt( + sax, + NULL, + NULL, + 0, + filename + ); + if (ctx == NULL) { + rb_raise(rb_eRuntimeError, "Could not create a parser context"); + } + + ctx->userData = ctx; + ctx->_private = (void *)_xml_sax; + + DATA_PTR(self) = ctx; + return self; +} + +static VALUE +noko_xml_sax_push_parser__options_get(VALUE self) +{ + xmlParserCtxtPtr ctx; + + ctx = noko_xml_sax_push_parser_unwrap(self); + + return INT2NUM(xmlCtxtGetOptions(ctx)); +} + +static VALUE +noko_xml_sax_push_parser__options_set(VALUE self, VALUE options) +{ + int error; + xmlParserCtxtPtr ctx; + + ctx = noko_xml_sax_push_parser_unwrap(self); + + error = xmlCtxtSetOptions(ctx, (int)NUM2INT(options)); + if (error) { + rb_raise(rb_eRuntimeError, "Cannot set XML parser context options (%x)", error); + } + + return Qnil; +} + +/* + * call-seq: + * replace_entities + * + * See Document@Entity+Handling for an explanation of the behavior controlled by this flag. + * + * [Returns] (Boolean) Value of the parse option. (Default +false+) + * + * This option is perhaps misnamed by the libxml2 author, since it controls resolution and not + * replacement. + */ +static VALUE +noko_xml_sax_push_parser__replace_entities_get(VALUE self) +{ + xmlParserCtxtPtr ctxt = noko_xml_sax_push_parser_unwrap(self); + + if (xmlCtxtGetOptions(ctxt) & XML_PARSE_NOENT) { + return Qtrue; + } else { + return Qfalse; + } +} + +/* + * call-seq: + * replace_entities=(value) + * + * See Document@Entity+Handling for an explanation of the behavior controlled by this flag. + * + * [Parameters] + * - +value+ (Boolean) Whether external parsed entities will be resolved. + * + * ⚠ It is UNSAFE to set this option to +true+ when parsing untrusted documents. The option + * defaults to +false+ for this reason. 
+ * + * This option is perhaps misnamed by the libxml2 author, since it controls resolution and not + * replacement. + */ +static VALUE +noko_xml_sax_push_parser__replace_entities_set(VALUE self, VALUE value) +{ + int error; + xmlParserCtxtPtr ctxt = noko_xml_sax_push_parser_unwrap(self); + + if (RB_TEST(value)) { + error = xmlCtxtSetOptions(ctxt, xmlCtxtGetOptions(ctxt) | XML_PARSE_NOENT); + } else { + error = xmlCtxtSetOptions(ctxt, xmlCtxtGetOptions(ctxt) & ~XML_PARSE_NOENT); + } + + if (error) { + rb_raise(rb_eRuntimeError, "failed to set parser context options (%x)", error); + } + + return value; +} + +void +noko_init_xml_sax_push_parser(void) +{ + cNokogiriXmlSaxPushParser = rb_define_class_under(mNokogiriXmlSax, "PushParser", rb_cObject); + + rb_define_alloc_func(cNokogiriXmlSaxPushParser, xml_sax_push_parser_allocate); + + rb_define_method(cNokogiriXmlSaxPushParser, "options", + noko_xml_sax_push_parser__options_get, 0); + rb_define_method(cNokogiriXmlSaxPushParser, "options=", + noko_xml_sax_push_parser__options_set, 1); + rb_define_method(cNokogiriXmlSaxPushParser, "replace_entities", + noko_xml_sax_push_parser__replace_entities_get, 0); + rb_define_method(cNokogiriXmlSaxPushParser, "replace_entities=", + noko_xml_sax_push_parser__replace_entities_set, 1); + + rb_define_private_method(cNokogiriXmlSaxPushParser, "initialize_native", + noko_xml_sax_push_parser__initialize_native, 2); + rb_define_private_method(cNokogiriXmlSaxPushParser, "native_write", + noko_xml_sax_push_parser__native_write, 2); +} diff --git a/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/xml_schema.c b/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/xml_schema.c new file mode 100644 index 000000000..b2bded981 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/xml_schema.c @@ -0,0 +1,226 @@ +#include + +VALUE cNokogiriXmlSchema; + +static void +xml_schema_deallocate(void *data) +{ + xmlSchemaPtr schema = data; + xmlSchemaFree(schema); +} + +static const rb_data_type_t xml_schema_type = { + .wrap_struct_name = "xmlSchema", + .function = { + .dfree = xml_schema_deallocate, + }, + .flags = RUBY_TYPED_FREE_IMMEDIATELY | RUBY_TYPED_WB_PROTECTED, +}; + +static VALUE +noko_xml_schema__validate_document(VALUE self, VALUE document) +{ + xmlDocPtr doc; + xmlSchemaPtr schema; + xmlSchemaValidCtxtPtr valid_ctxt; + VALUE errors; + + TypedData_Get_Struct(self, xmlSchema, &xml_schema_type, schema); + doc = noko_xml_document_unwrap(document); + + errors = rb_ary_new(); + + valid_ctxt = xmlSchemaNewValidCtxt(schema); + + if (NULL == valid_ctxt) { + /* we have a problem */ + rb_raise(rb_eRuntimeError, "Could not create a validation context"); + } + + xmlSchemaSetValidStructuredErrors( + valid_ctxt, + noko__error_array_pusher, + (void *)errors + ); + + int status = xmlSchemaValidateDoc(valid_ctxt, doc); + + xmlSchemaFreeValidCtxt(valid_ctxt); + + if (status != 0) { + if (RARRAY_LEN(errors) == 0) { + rb_ary_push(errors, rb_str_new2("Could not validate document")); + } + } + + return errors; +} + +static VALUE +noko_xml_schema__validate_file(VALUE self, VALUE rb_filename) +{ + xmlSchemaPtr schema; + xmlSchemaValidCtxtPtr valid_ctxt; + const char *filename ; + VALUE errors; + + TypedData_Get_Struct(self, xmlSchema, &xml_schema_type, schema); + filename = (const char *)StringValueCStr(rb_filename) ; + + errors = rb_ary_new(); + + valid_ctxt = xmlSchemaNewValidCtxt(schema); + + if (NULL == valid_ctxt) { + /* we have a problem */ + 
rb_raise(rb_eRuntimeError, "Could not create a validation context"); + } + + xmlSchemaSetValidStructuredErrors( + valid_ctxt, + noko__error_array_pusher, + (void *)errors + ); + + int status = xmlSchemaValidateFile(valid_ctxt, filename, 0); + + xmlSchemaFreeValidCtxt(valid_ctxt); + + if (status != 0) { + if (RARRAY_LEN(errors) == 0) { + rb_ary_push(errors, rb_str_new2("Could not validate file.")); + } + } + + return errors; +} + +static VALUE +xml_schema_parse_schema( + VALUE rb_class, + xmlSchemaParserCtxtPtr c_parser_context, + VALUE rb_parse_options +) +{ + xmlExternalEntityLoader saved_loader = 0; + libxmlStructuredErrorHandlerState handler_state; + + if (NIL_P(rb_parse_options)) { + rb_parse_options = rb_const_get_at( + rb_const_get_at(mNokogiriXml, rb_intern("ParseOptions")), + rb_intern("DEFAULT_SCHEMA") + ); + } + int c_parse_options = (int)NUM2INT(rb_funcall(rb_parse_options, rb_intern("to_i"), 0)); + + VALUE rb_errors = rb_ary_new(); + noko__structured_error_func_save_and_set(&handler_state, (void *)rb_errors, noko__error_array_pusher); + + xmlSchemaSetParserStructuredErrors( + c_parser_context, + noko__error_array_pusher, + (void *)rb_errors + ); + + if (c_parse_options & XML_PARSE_NONET) { + saved_loader = xmlGetExternalEntityLoader(); + xmlSetExternalEntityLoader(xmlNoNetExternalEntityLoader); + } + + xmlSchemaPtr c_schema = xmlSchemaParse(c_parser_context); + + if (saved_loader) { + xmlSetExternalEntityLoader(saved_loader); + } + + xmlSchemaFreeParserCtxt(c_parser_context); + noko__structured_error_func_restore(&handler_state); + + if (NULL == c_schema) { + VALUE exception = rb_funcall(cNokogiriXmlSyntaxError, rb_intern("aggregate"), 1, rb_errors); + if (RB_TEST(exception)) { + rb_exc_raise(exception); + } else { + rb_raise(rb_eRuntimeError, "Could not parse document"); + } + } + + VALUE rb_schema = TypedData_Wrap_Struct(rb_class, &xml_schema_type, c_schema); + rb_iv_set(rb_schema, "@errors", rb_errors); + rb_iv_set(rb_schema, "@parse_options", rb_parse_options); + + return rb_schema; +} + +/* + * :call-seq: + * from_document(input) → Nokogiri::XML::Schema + * from_document(input, parse_options) → Nokogiri::XML::Schema + * + * Parse an \XSD schema definition from a Document to create a new Nokogiri::XML::Schema + * + * [Parameters] + * - +input+ (XML::Document) A document containing the \XSD schema definition + * - +parse_options+ (Nokogiri::XML::ParseOptions) + * Defaults to Nokogiri::XML::ParseOptions::DEFAULT_SCHEMA + * + * [Returns] Nokogiri::XML::Schema + */ +static VALUE +noko_xml_schema_s_from_document(int argc, VALUE *argv, VALUE rb_class) +{ + /* TODO: deprecate this method and put file-or-string logic into .new so that becomes the + * preferred entry point, and this can become a private method */ + VALUE rb_document; + VALUE rb_parse_options; + VALUE rb_schema; + xmlDocPtr c_document; + xmlSchemaParserCtxtPtr c_parser_context; + int defensive_copy_p = 0; + + rb_scan_args(argc, argv, "11", &rb_document, &rb_parse_options); + + if (!rb_obj_is_kind_of(rb_document, cNokogiriXmlNode)) { + rb_raise(rb_eTypeError, + "expected parameter to be a Nokogiri::XML::Document, received %"PRIsVALUE, + rb_obj_class(rb_document)); + } + + if (!rb_obj_is_kind_of(rb_document, cNokogiriXmlDocument)) { + xmlNodePtr deprecated_node_type_arg; + NOKO_WARN_DEPRECATION("Passing a Node as the first parameter to Schema.from_document is deprecated. Please pass a Document instead. 
This will become an error in Nokogiri v1.17.0."); // TODO: deprecated in v1.15.3, remove in v1.17.0 + Noko_Node_Get_Struct(rb_document, xmlNode, deprecated_node_type_arg); + c_document = deprecated_node_type_arg->doc; + } else { + c_document = noko_xml_document_unwrap(rb_document); + } + + if (noko_xml_document_has_wrapped_blank_nodes_p(c_document)) { + // see https://github.com/sparklemotion/nokogiri/pull/2001 + c_document = xmlCopyDoc(c_document, 1); + defensive_copy_p = 1; + } + + c_parser_context = xmlSchemaNewDocParserCtxt(c_document); + rb_schema = xml_schema_parse_schema(rb_class, c_parser_context, rb_parse_options); + + if (defensive_copy_p) { + xmlFreeDoc(c_document); + c_document = NULL; + } + + return rb_schema; +} + +void +noko_init_xml_schema(void) +{ + cNokogiriXmlSchema = rb_define_class_under(mNokogiriXml, "Schema", rb_cObject); + + rb_undef_alloc_func(cNokogiriXmlSchema); + + rb_define_singleton_method(cNokogiriXmlSchema, "from_document", noko_xml_schema_s_from_document, -1); + + rb_define_private_method(cNokogiriXmlSchema, "validate_document", noko_xml_schema__validate_document, 1); + rb_define_private_method(cNokogiriXmlSchema, "validate_file", noko_xml_schema__validate_file, 1); +} diff --git a/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/xml_syntax_error.c b/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/xml_syntax_error.c new file mode 100644 index 000000000..d0a3bf8d1 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/xml_syntax_error.c @@ -0,0 +1,93 @@ +#include + +VALUE cNokogiriXmlSyntaxError; + +void +noko__structured_error_func_save(libxmlStructuredErrorHandlerState *handler_state) +{ + /* this method is tightly coupled to the implementation of xmlSetStructuredErrorFunc */ + handler_state->user_data = xmlStructuredErrorContext; + handler_state->handler = xmlStructuredError; +} + +void +noko__structured_error_func_save_and_set( + libxmlStructuredErrorHandlerState *handler_state, + void *user_data, + xmlStructuredErrorFunc handler +) +{ + noko__structured_error_func_save(handler_state); + xmlSetStructuredErrorFunc(user_data, handler); +} + +void +noko__structured_error_func_restore(libxmlStructuredErrorHandlerState *handler_state) +{ + xmlSetStructuredErrorFunc(handler_state->user_data, handler_state->handler); +} + +void +noko__error_array_pusher(void *ctx, xmlErrorConstPtr error) +{ + VALUE list = (VALUE)ctx; + Check_Type(list, T_ARRAY); + rb_ary_push(list, noko_xml_syntax_error__wrap(error)); +} + +void +noko__error_raise(void *ctx, xmlErrorConstPtr error) +{ + rb_exc_raise(noko_xml_syntax_error__wrap(error)); +} + +VALUE +noko_xml_syntax_error__wrap(xmlErrorConstPtr error) +{ + xmlChar *c_path ; + VALUE msg, e, klass; + + klass = cNokogiriXmlSyntaxError; + + if (error && error->domain == XML_FROM_XPATH) { + klass = cNokogiriXmlXpathSyntaxError; + } + + msg = (error && error->message) ? 
NOKOGIRI_STR_NEW2(error->message) : Qnil; + + e = rb_class_new_instance( + 1, + &msg, + klass + ); + + if (error) { + c_path = xmlGetNodePath(error->node); + + rb_iv_set(e, "@domain", INT2NUM(error->domain)); + rb_iv_set(e, "@code", INT2NUM(error->code)); + rb_iv_set(e, "@level", INT2NUM((short)error->level)); + rb_iv_set(e, "@file", RBSTR_OR_QNIL(error->file)); + rb_iv_set(e, "@line", INT2NUM(error->line)); + rb_iv_set(e, "@path", RBSTR_OR_QNIL(c_path)); + rb_iv_set(e, "@str1", RBSTR_OR_QNIL(error->str1)); + rb_iv_set(e, "@str2", RBSTR_OR_QNIL(error->str2)); + rb_iv_set(e, "@str3", RBSTR_OR_QNIL(error->str3)); + rb_iv_set(e, "@int1", INT2NUM(error->int1)); + rb_iv_set(e, "@column", INT2NUM(error->int2)); + + xmlFree(c_path); + } + + return e; +} + +void +noko_init_xml_syntax_error(void) +{ + assert(cNokogiriSyntaxError); + /* + * The XML::SyntaxError is raised on parse errors + */ + cNokogiriXmlSyntaxError = rb_define_class_under(mNokogiriXml, "SyntaxError", cNokogiriSyntaxError); +} diff --git a/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/xml_text.c b/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/xml_text.c new file mode 100644 index 000000000..e15de27f9 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/xml_text.c @@ -0,0 +1,59 @@ +#include + +VALUE cNokogiriXmlText ; + +/* + * call-seq: + * new(content, document) + * + * Create a new Text element on the +document+ with +content+ + */ +static VALUE +rb_xml_text_s_new(int argc, VALUE *argv, VALUE klass) +{ + xmlDocPtr c_document; + xmlNodePtr c_node; + VALUE rb_string; + VALUE rb_document; + VALUE rb_rest; + VALUE rb_node; + + rb_scan_args(argc, argv, "2*", &rb_string, &rb_document, &rb_rest); + + Check_Type(rb_string, T_STRING); + if (!rb_obj_is_kind_of(rb_document, cNokogiriXmlNode)) { + rb_raise(rb_eTypeError, + "expected second parameter to be a Nokogiri::XML::Document, received %"PRIsVALUE, + rb_obj_class(rb_document)); + } + + if (!rb_obj_is_kind_of(rb_document, cNokogiriXmlDocument)) { + xmlNodePtr deprecated_node_type_arg; + NOKO_WARN_DEPRECATION("Passing a Node as the second parameter to Text.new is deprecated. Please pass a Document instead. This will become an error in Nokogiri v1.17.0."); // TODO: deprecated in v1.15.3, remove in v1.17.0 + Noko_Node_Get_Struct(rb_document, xmlNode, deprecated_node_type_arg); + c_document = deprecated_node_type_arg->doc; + } else { + c_document = noko_xml_document_unwrap(rb_document); + } + + c_node = xmlNewDocText(c_document, (xmlChar *)StringValueCStr(rb_string)); + noko_xml_document_pin_node(c_node); + rb_node = noko_xml_node_wrap(klass, c_node) ; + rb_obj_call_init(rb_node, argc, argv); + + if (rb_block_given_p()) { rb_yield(rb_node); } + + return rb_node; +} + +void +noko_init_xml_text(void) +{ + assert(cNokogiriXmlCharacterData); + /* + * Wraps Text nodes. 
+ */ + cNokogiriXmlText = rb_define_class_under(mNokogiriXml, "Text", cNokogiriXmlCharacterData); + + rb_define_singleton_method(cNokogiriXmlText, "new", rb_xml_text_s_new, -1); +} diff --git a/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/xml_xpath_context.c b/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/xml_xpath_context.c new file mode 100644 index 000000000..8f71c2e31 --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/xml_xpath_context.c @@ -0,0 +1,486 @@ +#include + +VALUE cNokogiriXmlXpathContext; + +/* + * these constants have matching declarations in + * ext/java/nokogiri/internals/NokogiriNamespaceContext.java + */ +static const xmlChar *NOKOGIRI_PREFIX = (const xmlChar *)"nokogiri"; +static const xmlChar *NOKOGIRI_URI = (const xmlChar *)"http://www.nokogiri.org/default_ns/ruby/extensions_functions"; +static const xmlChar *NOKOGIRI_BUILTIN_PREFIX = (const xmlChar *)"nokogiri-builtin"; +static const xmlChar *NOKOGIRI_BUILTIN_URI = (const xmlChar *)"https://www.nokogiri.org/default_ns/ruby/builtins"; + +static void +_noko_xml_xpath_context_dfree(void *data) +{ + xmlXPathContextPtr c_context = data; + xmlXPathFreeContext(c_context); +} + +static const rb_data_type_t _noko_xml_xpath_context_type = { + .wrap_struct_name = "xmlXPathContext", + .function = { + .dfree = _noko_xml_xpath_context_dfree, + }, + .flags = RUBY_TYPED_FREE_IMMEDIATELY | RUBY_TYPED_WB_PROTECTED, +}; + +/* find a CSS class in an HTML element's `class` attribute */ +static const xmlChar * +_noko_xml_xpath_context__css_class(const xmlChar *str, const xmlChar *val) +{ + int val_len; + + if (str == NULL) { return (NULL); } + if (val == NULL) { return (NULL); } + + val_len = xmlStrlen(val); + if (val_len == 0) { return (str); } + + while (*str != 0) { + if ((*str == *val) && !xmlStrncmp(str, val, val_len)) { + const xmlChar *next_byte = str + val_len; + + /* only match if the next byte is whitespace or end of string */ + if ((*next_byte == 0) || (IS_BLANK_CH(*next_byte))) { + return ((const xmlChar *)str); + } + } + + /* advance str to whitespace */ + while ((*str != 0) && !IS_BLANK_CH(*str)) { + str++; + } + + /* advance str to start of next word or end of string */ + while ((*str != 0) && IS_BLANK_CH(*str)) { + str++; + } + } + + return (NULL); +} + +/* xmlXPathFunction to wrap _noko_xml_xpath_context__css_class() */ +static void +noko_xml_xpath_context_xpath_func_css_class(xmlXPathParserContextPtr ctxt, int nargs) +{ + xmlXPathObjectPtr hay, needle; + + CHECK_ARITY(2); + + CAST_TO_STRING; + needle = valuePop(ctxt); + if ((needle == NULL) || (needle->type != XPATH_STRING)) { + xmlXPathFreeObject(needle); + XP_ERROR(XPATH_INVALID_TYPE); + } + + CAST_TO_STRING; + hay = valuePop(ctxt); + if ((hay == NULL) || (hay->type != XPATH_STRING)) { + xmlXPathFreeObject(hay); + xmlXPathFreeObject(needle); + XP_ERROR(XPATH_INVALID_TYPE); + } + + if (_noko_xml_xpath_context__css_class(hay->stringval, needle->stringval)) { + valuePush(ctxt, xmlXPathNewBoolean(1)); + } else { + valuePush(ctxt, xmlXPathNewBoolean(0)); + } + + xmlXPathFreeObject(hay); + xmlXPathFreeObject(needle); +} + + +/* xmlXPathFunction to select nodes whose local name matches, for HTML5 CSS queries that should + * ignore namespaces */ +static void +noko_xml_xpath_context_xpath_func_local_name_is(xmlXPathParserContextPtr ctxt, int nargs) +{ + xmlXPathObjectPtr element_name; + + assert(ctxt->context->node); + + CHECK_ARITY(1); + CAST_TO_STRING; + 
CHECK_TYPE(XPATH_STRING); + element_name = valuePop(ctxt); + + valuePush( + ctxt, + xmlXPathNewBoolean(xmlStrEqual(ctxt->context->node->name, element_name->stringval)) + ); + + xmlXPathFreeObject(element_name); +} + + +/* + * call-seq: + * register_ns(prefix, uri) → Nokogiri::XML::XPathContext + * + * Register the namespace with +prefix+ and +uri+ for use in future queries. + * Passing a uri of +nil+ will unregister the namespace. + * + * [Returns] +self+ + */ +static VALUE +noko_xml_xpath_context_register_ns(VALUE rb_context, VALUE prefix, VALUE uri) +{ + xmlXPathContextPtr c_context; + const xmlChar *ns_uri; + + TypedData_Get_Struct(rb_context, xmlXPathContext, &_noko_xml_xpath_context_type, c_context); + + if (NIL_P(uri)) { + ns_uri = NULL; + } else { + ns_uri = (const xmlChar *)StringValueCStr(uri); + } + + xmlXPathRegisterNs(c_context, (const xmlChar *)StringValueCStr(prefix), ns_uri); + + return rb_context; +} + +/* + * call-seq: + * register_variable(name, value) → Nokogiri::XML::XPathContext + * + * Register the variable +name+ with +value+ for use in future queries. + * Passing a value of +nil+ will unregister the variable. + * + * [Returns] +self+ + */ +static VALUE +noko_xml_xpath_context_register_variable(VALUE rb_context, VALUE name, VALUE value) +{ + xmlXPathContextPtr c_context; + xmlXPathObjectPtr xmlValue; + + TypedData_Get_Struct(rb_context, xmlXPathContext, &_noko_xml_xpath_context_type, c_context); + + if (NIL_P(value)) { + xmlValue = NULL; + } else { + xmlValue = xmlXPathNewCString(StringValueCStr(value)); + } + + xmlXPathRegisterVariable(c_context, (const xmlChar *)StringValueCStr(name), xmlValue); + + return rb_context; +} + + +/* + * convert an XPath object into a Ruby object of the appropriate type. + * returns Qundef if no conversion was possible. + */ +static VALUE +_noko_xml_xpath_context__xpath2ruby(xmlXPathObjectPtr c_xpath_object, xmlXPathContextPtr c_context) +{ + VALUE rb_retval; + + assert(c_context->doc); + assert(DOC_RUBY_OBJECT_TEST(c_context->doc)); + + switch (c_xpath_object->type) { + case XPATH_STRING: + rb_retval = NOKOGIRI_STR_NEW2(c_xpath_object->stringval); + xmlFree(c_xpath_object->stringval); + return rb_retval; + + case XPATH_NODESET: + return noko_xml_node_set_wrap( + c_xpath_object->nodesetval, + DOC_RUBY_OBJECT(c_context->doc) + ); + + case XPATH_NUMBER: + return rb_float_new(c_xpath_object->floatval); + + case XPATH_BOOLEAN: + return (c_xpath_object->boolval == 1) ? 
Qtrue : Qfalse; + + default: + return Qundef; + } +} + +void +Nokogiri_marshal_xpath_funcall_and_return_values( + xmlXPathParserContextPtr ctxt, + int argc, + VALUE rb_xpath_handler, + const char *method_name +) +{ + VALUE rb_retval; + VALUE *argv; + VALUE rb_node_set = Qnil; + xmlNodeSetPtr c_node_set = NULL; + xmlXPathObjectPtr c_xpath_object; + + assert(ctxt->context->doc); + assert(DOC_RUBY_OBJECT_TEST(ctxt->context->doc)); + + argv = (VALUE *)ruby_xcalloc((size_t)argc, sizeof(VALUE)); + for (int j = 0 ; j < argc ; ++j) { + rb_gc_register_address(&argv[j]); + } + + for (int j = argc - 1 ; j >= 0 ; --j) { + c_xpath_object = valuePop(ctxt); + argv[j] = _noko_xml_xpath_context__xpath2ruby(c_xpath_object, ctxt->context); + if (argv[j] == Qundef) { + argv[j] = NOKOGIRI_STR_NEW2(xmlXPathCastToString(c_xpath_object)); + } + xmlXPathFreeNodeSetList(c_xpath_object); + } + + rb_retval = rb_funcall2( + rb_xpath_handler, + rb_intern((const char *)method_name), + argc, + argv + ); + + for (int j = 0 ; j < argc ; ++j) { + rb_gc_unregister_address(&argv[j]); + } + ruby_xfree(argv); + + switch (TYPE(rb_retval)) { + case T_FLOAT: + case T_BIGNUM: + case T_FIXNUM: + xmlXPathReturnNumber(ctxt, NUM2DBL(rb_retval)); + break; + case T_STRING: + xmlXPathReturnString(ctxt, xmlCharStrdup(StringValueCStr(rb_retval))); + break; + case T_TRUE: + xmlXPathReturnTrue(ctxt); + break; + case T_FALSE: + xmlXPathReturnFalse(ctxt); + break; + case T_NIL: + break; + case T_ARRAY: { + VALUE construct_args[2] = { DOC_RUBY_OBJECT(ctxt->context->doc), rb_retval }; + rb_node_set = rb_class_new_instance(2, construct_args, cNokogiriXmlNodeSet); + c_node_set = noko_xml_node_set_unwrap(rb_node_set); + xmlXPathReturnNodeSet(ctxt, xmlXPathNodeSetMerge(NULL, c_node_set)); + } + break; + case T_DATA: + if (rb_obj_is_kind_of(rb_retval, cNokogiriXmlNodeSet)) { + c_node_set = noko_xml_node_set_unwrap(rb_retval); + /* Copy the node set, otherwise it will get GC'd. */ + xmlXPathReturnNodeSet(ctxt, xmlXPathNodeSetMerge(NULL, c_node_set)); + break; + } + default: + rb_raise(rb_eRuntimeError, "Invalid return type"); + } +} + +static void +_noko_xml_xpath_context__handler_invoker(xmlXPathParserContextPtr ctxt, int argc) +{ + VALUE rb_xpath_handler = Qnil; + const char *method_name = NULL ; + + assert(ctxt); + assert(ctxt->context); + assert(ctxt->context->userData); + assert(ctxt->context->function); + + rb_xpath_handler = (VALUE)(ctxt->context->userData); + method_name = (const char *)(ctxt->context->function); + + Nokogiri_marshal_xpath_funcall_and_return_values( + ctxt, + argc, + rb_xpath_handler, + method_name + ); +} + +static xmlXPathFunction +_noko_xml_xpath_context_handler_lookup(void *data, const xmlChar *c_name, const xmlChar *c_ns_uri) +{ + VALUE rb_handler = (VALUE)data; + if (rb_respond_to(rb_handler, rb_intern((const char *)c_name))) { + if (c_ns_uri == NULL) { + NOKO_WARN_DEPRECATION("A custom XPath or CSS handler function named '%s' is being invoked without a namespace. Please update your query to reference this function as 'nokogiri:%s'. Invoking custom handler functions without a namespace is deprecated and will become an error in Nokogiri v1.17.0.", + c_name, c_name); // TODO deprecated in v1.15.0, remove in v1.19.0 + } + return _noko_xml_xpath_context__handler_invoker; + } + + return NULL; +} + +PRINTFLIKE_DECL(2, 3) +static void +_noko_xml_xpath_context__generic_exception_pusher(void *data, const char *msg, ...) 
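+/* Error callback installed around expression evaluation: each libxml2 generic error
+ * message is wrapped in a cNokogiriXmlXpathSyntaxError exception and pushed onto the
+ * Ruby array passed in as `data`, so evaluate() can raise the first accumulated error
+ * when the query fails. */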
+{ + VALUE rb_errors = (VALUE)data; + VALUE rb_message; + VALUE rb_exception; + + Check_Type(rb_errors, T_ARRAY); + +#ifdef TRUFFLERUBY_NOKOGIRI_SYSTEM_LIBRARIES + /* It is not currently possible to pass var args from native + functions to sulong, so we work around the issue here. */ + rb_message = rb_sprintf("_noko_xml_xpath_context__generic_exception_pusher: %s", msg); +#else + va_list args; + va_start(args, msg); + rb_message = rb_vsprintf(msg, args); + va_end(args); +#endif + + rb_exception = rb_exc_new_str(cNokogiriXmlXpathSyntaxError, rb_message); + rb_ary_push(rb_errors, rb_exception); +} + +/* + * call-seq: + * evaluate(search_path, handler = nil) → Object + * + * Evaluate the +search_path+ query. + * + * [Returns] an object of the appropriate type for the query, which could be +NodeSet+, a +String+, + * a +Float+, or a boolean. + */ +static VALUE +noko_xml_xpath_context_evaluate(int argc, VALUE *argv, VALUE rb_context) +{ + xmlXPathContextPtr c_context; + VALUE rb_expression = Qnil; + VALUE rb_function_lookup_handler = Qnil; + xmlChar *c_expression_str = NULL; + VALUE rb_errors = rb_ary_new(); + xmlXPathObjectPtr c_xpath_object; + VALUE rb_xpath_object = Qnil; + + TypedData_Get_Struct(rb_context, xmlXPathContext, &_noko_xml_xpath_context_type, c_context); + + rb_scan_args(argc, argv, "11", &rb_expression, &rb_function_lookup_handler); + + c_expression_str = (xmlChar *)StringValueCStr(rb_expression); + + if (Qnil != rb_function_lookup_handler) { + /* FIXME: not sure if this is the correct place to shove private data. */ + c_context->userData = (void *)rb_function_lookup_handler; + xmlXPathRegisterFuncLookup( + c_context, + _noko_xml_xpath_context_handler_lookup, + (void *)rb_function_lookup_handler + ); + } + + /* TODO: use xmlXPathSetErrorHandler (as of 2.13.0) */ + xmlSetStructuredErrorFunc((void *)rb_errors, noko__error_array_pusher); + xmlSetGenericErrorFunc((void *)rb_errors, _noko_xml_xpath_context__generic_exception_pusher); + + c_xpath_object = xmlXPathEvalExpression(c_expression_str, c_context); + + xmlSetStructuredErrorFunc(NULL, NULL); + xmlSetGenericErrorFunc(NULL, NULL); + + xmlXPathRegisterFuncLookup(c_context, NULL, NULL); + + if (c_xpath_object == NULL) { + rb_exc_raise(rb_ary_entry(rb_errors, 0)); + } + + rb_xpath_object = _noko_xml_xpath_context__xpath2ruby(c_xpath_object, c_context); + if (rb_xpath_object == Qundef) { + rb_xpath_object = noko_xml_node_set_wrap(NULL, DOC_RUBY_OBJECT(c_context->doc)); + } + + xmlXPathFreeNodeSetList(c_xpath_object); + + return rb_xpath_object; +} + +/* + * call-seq: + * new(node) + * + * Create a new XPathContext with +node+ as the context node. 
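+ *
+ * A minimal sketch of driving the context directly; normally Document#xpath builds and
+ * manages the context for you, and the document and namespace below are illustrative
+ * assumptions rather than fixtures from this library:
+ *
+ *   doc = Nokogiri::XML("<root><a/><a/></root>")
+ *   ctx = Nokogiri::XML::XPathContext.new(doc.root)
+ *   ctx.register_ns("ex", "http://example.org/ns")  # illustrative namespace
+ *   ctx.evaluate("count(//a)")                      # => 2.0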
+ */ +static VALUE +noko_xml_xpath_context_new(VALUE klass, VALUE rb_node) +{ + xmlNodePtr c_node; + xmlXPathContextPtr c_context; + VALUE rb_context; + + Noko_Node_Get_Struct(rb_node, xmlNode, c_node); + +#if LIBXML_VERSION < 21000 + xmlXPathInit(); /* deprecated in 40483d0 */ +#endif + + c_context = xmlXPathNewContext(c_node->doc); + c_context->node = c_node; + + xmlXPathRegisterNs(c_context, NOKOGIRI_PREFIX, NOKOGIRI_URI); + xmlXPathRegisterNs(c_context, NOKOGIRI_BUILTIN_PREFIX, NOKOGIRI_BUILTIN_URI); + + xmlXPathRegisterFuncNS(c_context, + (const xmlChar *)"css-class", NOKOGIRI_BUILTIN_URI, + noko_xml_xpath_context_xpath_func_css_class); + xmlXPathRegisterFuncNS(c_context, + (const xmlChar *)"local-name-is", NOKOGIRI_BUILTIN_URI, + noko_xml_xpath_context_xpath_func_local_name_is); + + rb_context = TypedData_Wrap_Struct(klass, &_noko_xml_xpath_context_type, c_context); + + return rb_context; +} + + +/* :nodoc: */ +static VALUE +noko_xml_xpath_context_set_node(VALUE rb_context, VALUE rb_node) +{ + xmlNodePtr c_node; + xmlXPathContextPtr c_context; + + TypedData_Get_Struct(rb_context, xmlXPathContext, &_noko_xml_xpath_context_type, c_context); + Noko_Node_Get_Struct(rb_node, xmlNode, c_node); + + c_context->doc = c_node->doc; + c_context->node = c_node; + + return rb_node; +} + +void +noko_init_xml_xpath_context(void) +{ + /* + * XPathContext is the entry point for searching a +Document+ by using XPath. + */ + cNokogiriXmlXpathContext = rb_define_class_under(mNokogiriXml, "XPathContext", rb_cObject); + + rb_undef_alloc_func(cNokogiriXmlXpathContext); + + rb_define_singleton_method(cNokogiriXmlXpathContext, "new", noko_xml_xpath_context_new, 1); + + rb_define_method(cNokogiriXmlXpathContext, "evaluate", noko_xml_xpath_context_evaluate, -1); + rb_define_method(cNokogiriXmlXpathContext, "register_variable", noko_xml_xpath_context_register_variable, 2); + rb_define_method(cNokogiriXmlXpathContext, "register_ns", noko_xml_xpath_context_register_ns, 2); + rb_define_method(cNokogiriXmlXpathContext, "node=", noko_xml_xpath_context_set_node, 1); +} diff --git a/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/xslt_stylesheet.c b/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/xslt_stylesheet.c new file mode 100644 index 000000000..903f729ca --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/ext/nokogiri/xslt_stylesheet.c @@ -0,0 +1,421 @@ +#include + +VALUE cNokogiriXsltStylesheet; + +static void +mark(void *data) +{ + nokogiriXsltStylesheetTuple *wrapper = (nokogiriXsltStylesheetTuple *)data; + rb_gc_mark(wrapper->func_instances); +} + +static void +dealloc(void *data) +{ + nokogiriXsltStylesheetTuple *wrapper = (nokogiriXsltStylesheetTuple *)data; + xsltStylesheetPtr doc = wrapper->ss; + xsltFreeStylesheet(doc); + ruby_xfree(wrapper); +} + +static const rb_data_type_t nokogiri_xslt_stylesheet_tuple_type = { + .wrap_struct_name = "nokogiriXsltStylesheetTuple", + .function = { + .dmark = mark, + .dfree = dealloc, + }, + .flags = RUBY_TYPED_FREE_IMMEDIATELY +}; + +PRINTFLIKE_DECL(2, 3) +static void +xslt_generic_error_handler(void *ctx, const char *msg, ...) +{ + VALUE message; + +#ifdef TRUFFLERUBY_NOKOGIRI_SYSTEM_LIBRARIES + /* It is not currently possible to pass var args from native + functions to sulong, so we work around the issue here. 
*/ + message = rb_sprintf("xslt_generic_error_handler: %s", msg); +#else + va_list args; + va_start(args, msg); + message = rb_vsprintf(msg, args); + va_end(args); +#endif + + rb_str_concat((VALUE)ctx, message); +} + +VALUE +Nokogiri_wrap_xslt_stylesheet(xsltStylesheetPtr ss) +{ + VALUE self; + nokogiriXsltStylesheetTuple *wrapper; + + self = TypedData_Make_Struct( + cNokogiriXsltStylesheet, + nokogiriXsltStylesheetTuple, + &nokogiri_xslt_stylesheet_tuple_type, + wrapper + ); + + ss->_private = (void *)self; + wrapper->ss = ss; + wrapper->func_instances = rb_ary_new(); + + return self; +} + +/* + * call-seq: + * parse_stylesheet_doc(document) + * + * Parse an XSLT::Stylesheet from +document+. + * + * [Parameters] + * - +document+ (Nokogiri::XML::Document) the document to be parsed. + * + * [Returns] Nokogiri::XSLT::Stylesheet + */ +static VALUE +parse_stylesheet_doc(VALUE klass, VALUE xmldocobj) +{ + xmlDocPtr xml, xml_cpy; + VALUE errstr, exception; + xsltStylesheetPtr ss ; + + xml = noko_xml_document_unwrap(xmldocobj); + + errstr = rb_str_new(0, 0); + xsltSetGenericErrorFunc((void *)errstr, xslt_generic_error_handler); + + xml_cpy = xmlCopyDoc(xml, 1); /* 1 => recursive */ + ss = xsltParseStylesheetDoc(xml_cpy); + + xsltSetGenericErrorFunc(NULL, NULL); + + if (!ss) { + xmlFreeDoc(xml_cpy); + exception = rb_exc_new3(rb_eRuntimeError, errstr); + rb_exc_raise(exception); + } + + return Nokogiri_wrap_xslt_stylesheet(ss); +} + + +/* + * call-seq: + * serialize(document) + * + * Serialize +document+ to an xml string, as specified by the +method+ parameter in the Stylesheet. + */ +static VALUE +rb_xslt_stylesheet_serialize(VALUE self, VALUE xmlobj) +{ + xmlDocPtr xml ; + nokogiriXsltStylesheetTuple *wrapper; + xmlChar *doc_ptr ; + int doc_len ; + VALUE rval ; + + xml = noko_xml_document_unwrap(xmlobj); + TypedData_Get_Struct( + self, + nokogiriXsltStylesheetTuple, + &nokogiri_xslt_stylesheet_tuple_type, + wrapper + ); + xsltSaveResultToString(&doc_ptr, &doc_len, xml, wrapper->ss); + rval = NOKOGIRI_STR_NEW(doc_ptr, doc_len); + xmlFree(doc_ptr); + return rval ; +} + +/* + * call-seq: + * transform(document) + * transform(document, params = {}) + * + * Transform an XML::Document as defined by an XSLT::Stylesheet. + * + * [Parameters] + * - +document+ (Nokogiri::XML::Document) the document to be transformed. + * - +params+ (Hash, Array) strings used as XSLT parameters. + * + * [Returns] Nokogiri::XML::Document + * + * *Example* of basic transformation: + * + * xslt = <<~XSLT + * + * + * + * + * + * + * + *

+ *     <xsl:stylesheet version="1.0"
+ *     xmlns:xsl="http://www.w3.org/1999/XSL/Transform">
+ *
+ *     <xsl:param name="title"/>
+ *
+ *     <xsl:template match="/">
+ *       <html>
+ *         <body>
+ *           <h1><xsl:value-of select="$title"/></h1>
+ *           <ol>
+ *             <xsl:for-each select="staff/employee">
+ *               <li><xsl:value-of select="employeeId"/></li>
+ *             </xsl:for-each>
+ *           </ol>
+ *         </body>
+ *       </html>
+ *     </xsl:template>
+ *     </xsl:stylesheet>
+ *   XSLT
+ *
+ *   xml = <<~XML
+ *     <staff>
+ *       <employee>
+ *         <employeeId>EMP0001</employeeId>
+ *         <position>Accountant</position>
+ *       </employee>
+ *       <employee>
+ *         <employeeId>EMP0002</employeeId>
+ *         <position>Developer</position>
+ *       </employee>
+ *     </staff>
+ *   XML
+ *
+ *   doc = Nokogiri::XML::Document.parse(xml)
+ *   stylesheet = Nokogiri::XSLT.parse(xslt)
+ *
+ * ⚠ Note that the +h1+ element is empty because no param has been provided!
+ *
+ *   stylesheet.transform(doc).to_xml
+ *   # => "<html><body>\n" +
+ *   #    "<h1></h1>\n" +
+ *   #    "<ol>\n" +
+ *   #    "<li>EMP0001</li>\n" +
+ *   #    "<li>EMP0002</li>\n" +
+ *   #    "</ol>\n" +
+ *   #    "</body></html>\n"
+ *
+ * *Example* of using an input parameter hash:
+ *
+ * ⚠ The title is populated, but note how we need to quote-escape the value.
+ *
+ *   stylesheet.transform(doc, { "title" => "'Employee List'" }).to_xml
+ *   # => "<html><body>\n" +
+ *   #    "<h1>Employee List</h1>\n" +
+ *   #    "<ol>\n" +
+ *   #    "<li>EMP0001</li>\n" +
+ *   #    "<li>EMP0002</li>\n" +
+ *   #    "</ol>\n" +
+ *   #    "</body></html>\n"
+ *
+ * *Example* using the XSLT.quote_params helper method to safely quote-escape strings:
+ *
+ *   stylesheet.transform(doc, Nokogiri::XSLT.quote_params({ "title" => "Aaron's List" })).to_xml
+ *   # => "<html><body>\n" +
+ *   #    "<h1>Aaron's List</h1>\n" +
+ *   #    "<ol>\n" +
+ *   #    "<li>EMP0001</li>\n" +
+ *   #    "<li>EMP0002</li>\n" +
+ *   #    "</ol>\n" +
+ *   #    "</body></html>\n"
+ *
+ * *Example* using an array of XSLT parameters
+ *
+ * You can also use an array if you want to.
+ *
+ *   stylesheet.transform(doc, ["title", "'Employee List'"]).to_xml
+ *   # => "<html><body>\n" +
+ *   #    "<h1>Employee List</h1>\n" +
+ *   #    "<ol>\n" +
+ *   #    "<li>EMP0001</li>\n" +
+ *   #    "<li>EMP0002</li>\n" +
+ *   #    "</ol>\n" +
+ *   #    "</body></html>\n"
+ *
+ * Or pass an array to XSLT.quote_params:
+ *
+ *   stylesheet.transform(doc, Nokogiri::XSLT.quote_params(["title", "Aaron's List"])).to_xml
+ *   # => "<html><body>\n" +
+ *   #    "<h1>Aaron's List</h1>\n" +
+ *   #    "<ol>\n" +
+ *   #    "<li>EMP0001</li>\n" +
+ *   #    "<li>EMP0002</li>\n" +
+ *   #    "</ol>\n" +
\n" + + * # "\n" + * + * See: Nokogiri::XSLT.quote_params + */ +static VALUE +rb_xslt_stylesheet_transform(int argc, VALUE *argv, VALUE self) +{ + VALUE rb_document, rb_param, rb_error_str; + xmlDocPtr c_document ; + xmlDocPtr c_result_document ; + nokogiriXsltStylesheetTuple *wrapper; + const char **params ; + long param_len, j ; + int parse_error_occurred ; + int defensive_copy_p = 0; + + rb_scan_args(argc, argv, "11", &rb_document, &rb_param); + if (NIL_P(rb_param)) { rb_param = rb_ary_new2(0L) ; } + if (!rb_obj_is_kind_of(rb_document, cNokogiriXmlDocument)) { + rb_raise(rb_eArgError, "argument must be a Nokogiri::XML::Document"); + } + + /* handle hashes as arguments. */ + if (T_HASH == TYPE(rb_param)) { + rb_param = rb_funcall(rb_param, rb_intern("to_a"), 0); + rb_param = rb_funcall(rb_param, rb_intern("flatten"), 0); + } + + Check_Type(rb_param, T_ARRAY); + + c_document = noko_xml_document_unwrap(rb_document); + TypedData_Get_Struct(self, nokogiriXsltStylesheetTuple, &nokogiri_xslt_stylesheet_tuple_type, wrapper); + + param_len = RARRAY_LEN(rb_param); + params = ruby_xcalloc((size_t)param_len + 1, sizeof(char *)); + for (j = 0 ; j < param_len ; j++) { + VALUE entry = rb_ary_entry(rb_param, j); + const char *ptr = StringValueCStr(entry); + params[j] = ptr; + } + params[param_len] = 0 ; + + xsltTransformContextPtr c_transform_context = xsltNewTransformContext(wrapper->ss, c_document); + if (xsltNeedElemSpaceHandling(c_transform_context) && + noko_xml_document_has_wrapped_blank_nodes_p(c_document)) { + // see https://github.com/sparklemotion/nokogiri/issues/2800 + c_document = xmlCopyDoc(c_document, 1); + defensive_copy_p = 1; + } + xsltFreeTransformContext(c_transform_context); + + rb_error_str = rb_str_new(0, 0); + xsltSetGenericErrorFunc((void *)rb_error_str, xslt_generic_error_handler); + xmlSetGenericErrorFunc((void *)rb_error_str, xslt_generic_error_handler); + + c_result_document = xsltApplyStylesheet(wrapper->ss, c_document, params); + + ruby_xfree(params); + if (defensive_copy_p) { + xmlFreeDoc(c_document); + c_document = NULL; + } + + xsltSetGenericErrorFunc(NULL, NULL); + xmlSetGenericErrorFunc(NULL, NULL); + + parse_error_occurred = (Qfalse == rb_funcall(rb_error_str, rb_intern("empty?"), 0)); + + if (parse_error_occurred) { + rb_exc_raise(rb_exc_new3(rb_eRuntimeError, rb_error_str)); + } + + return noko_xml_document_wrap((VALUE)0, c_result_document) ; +} + +static void +method_caller(xmlXPathParserContextPtr ctxt, int nargs) +{ + VALUE handler; + const char *function_name; + xsltTransformContextPtr transform; + const xmlChar *functionURI; + + transform = xsltXPathGetTransformContext(ctxt); + functionURI = ctxt->context->functionURI; + handler = (VALUE)xsltGetExtData(transform, functionURI); + function_name = (const char *)(ctxt->context->function); + + Nokogiri_marshal_xpath_funcall_and_return_values( + ctxt, + nargs, + handler, + (const char *)function_name + ); +} + +static void * +initFunc(xsltTransformContextPtr ctxt, const xmlChar *uri) +{ + VALUE modules = rb_iv_get(mNokogiriXslt, "@modules"); + VALUE obj = rb_hash_aref(modules, rb_str_new2((const char *)uri)); + VALUE args = { Qfalse }; + VALUE methods = rb_funcall(obj, rb_intern("instance_methods"), 1, args); + VALUE inst; + nokogiriXsltStylesheetTuple *wrapper; + int i; + + for (i = 0; i < RARRAY_LEN(methods); i++) { + VALUE method_name = rb_obj_as_string(rb_ary_entry(methods, i)); + xsltRegisterExtFunction( + ctxt, + (unsigned char *)StringValueCStr(method_name), + uri, + method_caller + ); + } + + 
TypedData_Get_Struct( + (VALUE)ctxt->style->_private, + nokogiriXsltStylesheetTuple, + &nokogiri_xslt_stylesheet_tuple_type, + wrapper + ); + inst = rb_class_new_instance(0, NULL, obj); + rb_ary_push(wrapper->func_instances, inst); + + return (void *)inst; +} + +static void +shutdownFunc(xsltTransformContextPtr ctxt, + const xmlChar *uri, void *data) +{ + nokogiriXsltStylesheetTuple *wrapper; + + TypedData_Get_Struct( + (VALUE)ctxt->style->_private, + nokogiriXsltStylesheetTuple, + &nokogiri_xslt_stylesheet_tuple_type, + wrapper + ); + + rb_ary_clear(wrapper->func_instances); +} + +/* docstring is in lib/nokogiri/xslt.rb */ +static VALUE +rb_xslt_s_register(VALUE self, VALUE uri, VALUE obj) +{ + VALUE modules = rb_iv_get(self, "@modules"); + if (NIL_P(modules)) { + rb_raise(rb_eRuntimeError, "internal error: @modules not set"); + } + + rb_hash_aset(modules, uri, obj); + xsltRegisterExtModule( + (unsigned char *)StringValueCStr(uri), + initFunc, + shutdownFunc + ); + return self; +} + +void +noko_init_xslt_stylesheet(void) +{ + rb_define_singleton_method(mNokogiriXslt, "register", rb_xslt_s_register, 2); + rb_iv_set(mNokogiriXslt, "@modules", rb_hash_new()); + + cNokogiriXsltStylesheet = rb_define_class_under(mNokogiriXslt, "Stylesheet", rb_cObject); + + rb_undef_alloc_func(cNokogiriXsltStylesheet); + + rb_define_singleton_method(cNokogiriXsltStylesheet, "parse_stylesheet_doc", parse_stylesheet_doc, 1); + rb_define_method(cNokogiriXsltStylesheet, "serialize", rb_xslt_stylesheet_serialize, 1); + rb_define_method(cNokogiriXsltStylesheet, "transform", rb_xslt_stylesheet_transform, -1); +} diff --git a/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/gumbo-parser/CHANGES.md b/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/gumbo-parser/CHANGES.md new file mode 100644 index 000000000..277b3a2bb --- /dev/null +++ b/vendor/bundle/ruby/3.2.0/gems/nokogiri-1.18.9-x86_64-linux-gnu/gumbo-parser/CHANGES.md @@ -0,0 +1,63 @@ +## Gumbo 0.10.1 (2015-04-30) + +Same as 0.10.0, but with the version number bumped because the last version-number commit to v0.9.4 makes GitHub think that v0.9.4 is the latest version and so it's not highlighted on the webpage. + +## Gumbo 0.10.0 (2015-04-30) + +* Full support for `